🔥 rm venv

parent d39405a7e4 · commit 69fd92dc77
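The change below drops a committed virtualenv from the repository. For anyone reproducing it, a minimal sketch of the usual recipe — assuming the environment lives at `venv/` in the repo root, which matches the paths in the deleted scripts below:

```sh
# Stop tracking the venv without deleting it from the working tree.
git rm -r --cached venv

# Keep it out of future commits.
echo 'venv/' >> .gitignore
git add .gitignore
git commit -m "🔥 rm venv"
```

After this, each checkout recreates its own environment locally (e.g. `python -m venv venv`) instead of sharing one through git.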
Binary file not shown.
@@ -1,84 +0,0 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

if [ "${BASH_SOURCE-}" = "$0" ]; then
    echo "You must source this script: \$ source $0" >&2
    exit 33
fi

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands. Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV='/home/ilya/git/contact_mailer/venv'
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    if [ "x" != x ] ; then
        PS1="${PS1-}"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
    fi
    export PS1
fi

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
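For context, the script above (and its csh/fish/PowerShell siblings that follow) is meant to be sourced into the current shell, not executed. A typical round trip, assuming the `venv/` layout used in this repo:

```sh
# Enter the environment (modifies PATH and the prompt in the current shell).
source venv/bin/activate

# ...work inside the venv...
python -c 'import sys; print(sys.prefix)'   # now points into the venv

# Restore the previous environment.
deactivate
```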
@@ -1,55 +0,0 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.

set newline='\
'

alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH:q" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT:q" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate && unalias pydoc'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV '/home/ilya/git/contact_mailer/venv'

set _OLD_VIRTUAL_PATH="$PATH:q"
setenv PATH "$VIRTUAL_ENV:q/bin:$PATH:q"

if ('' != "") then
    set env_name = ''
else
    set env_name = '('"$VIRTUAL_ENV:t:q"') '
endif

if ( $?VIRTUAL_ENV_DISABLE_PROMPT ) then
    if ( $VIRTUAL_ENV_DISABLE_PROMPT == "" ) then
        set do_prompt = "1"
    else
        set do_prompt = "0"
    endif
else
    set do_prompt = "1"
endif

if ( $do_prompt == "1" ) then
    # Could be in a non-interactive environment,
    # in which case, $prompt is undefined and we wouldn't
    # care about the prompt anyway.
    if ( $?prompt ) then
        set _OLD_VIRTUAL_PROMPT="$prompt:q"
        if ( "$prompt:q" =~ *"$newline:q"* ) then
            :
        else
            set prompt = "$env_name:q$prompt:q"
        endif
    endif
endif

unset env_name
unset do_prompt

alias pydoc python -m pydoc

rehash
@@ -1,100 +0,0 @@
# This file must be used with `source bin/activate.fish` *within a running fish ( http://fishshell.com ) session*.
# Do not run it directly.

function _bashify_path -d "Converts a fish path to something bash can recognize"
    set fishy_path $argv
    set bashy_path $fishy_path[1]
    for path_part in $fishy_path[2..-1]
        set bashy_path "$bashy_path:$path_part"
    end
    echo $bashy_path
end

function _fishify_path -d "Converts a bash path to something fish can recognize"
    echo $argv | tr ':' '\n'
end

function deactivate -d 'Exit virtualenv mode and return to the normal environment.'
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        # https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling
        if test (echo $FISH_VERSION | head -c 1) -lt 3
            set -gx PATH (_fishify_path "$_OLD_VIRTUAL_PATH")
        else
            set -gx PATH "$_OLD_VIRTUAL_PATH"
        end
        set -e _OLD_VIRTUAL_PATH
    end

    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
        set -gx PYTHONHOME "$_OLD_VIRTUAL_PYTHONHOME"
        set -e _OLD_VIRTUAL_PYTHONHOME
    end

    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
        and functions -q _old_fish_prompt
        # Set an empty local `$fish_function_path` to allow the removal of `fish_prompt` using `functions -e`.
        set -l fish_function_path

        # Erase virtualenv's `fish_prompt` and restore the original.
        functions -e fish_prompt
        functions -c _old_fish_prompt fish_prompt
        functions -e _old_fish_prompt
        set -e _OLD_FISH_PROMPT_OVERRIDE
    end

    set -e VIRTUAL_ENV

    if test "$argv[1]" != 'nondestructive'
        # Self-destruct!
        functions -e pydoc
        functions -e deactivate
        functions -e _bashify_path
        functions -e _fishify_path
    end
end

# Unset irrelevant variables.
deactivate nondestructive

set -gx VIRTUAL_ENV '/home/ilya/git/contact_mailer/venv'

# https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling
if test (echo $FISH_VERSION | head -c 1) -lt 3
    set -gx _OLD_VIRTUAL_PATH (_bashify_path $PATH)
else
    set -gx _OLD_VIRTUAL_PATH "$PATH"
end
set -gx PATH "$VIRTUAL_ENV"'/bin' $PATH

# Unset `$PYTHONHOME` if set.
if set -q PYTHONHOME
    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
    set -e PYTHONHOME
end

function pydoc
    python -m pydoc $argv
end

if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
    # Copy the current `fish_prompt` function as `_old_fish_prompt`.
    functions -c fish_prompt _old_fish_prompt

    function fish_prompt
        # Run the user's prompt first; it might depend on (pipe)status.
        set -l prompt (_old_fish_prompt)

        # Prompt override provided?
        # If not, just prepend the environment name.
        if test -n ''
            printf '%s%s' '' (set_color normal)
        else
            printf '%s(%s) ' (set_color normal) (basename "$VIRTUAL_ENV")
        end

        string join -- \n $prompt # handle multi-line prompts
    end

    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
end
@@ -1,60 +0,0 @@
$script:THIS_PATH = $myinvocation.mycommand.path
$script:BASE_DIR = Split-Path (Resolve-Path "$THIS_PATH/..") -Parent

function global:deactivate([switch] $NonDestructive) {
    if (Test-Path variable:_OLD_VIRTUAL_PATH) {
        $env:PATH = $variable:_OLD_VIRTUAL_PATH
        Remove-Variable "_OLD_VIRTUAL_PATH" -Scope global
    }

    if (Test-Path function:_old_virtual_prompt) {
        $function:prompt = $function:_old_virtual_prompt
        Remove-Item function:\_old_virtual_prompt
    }

    if ($env:VIRTUAL_ENV) {
        Remove-Item env:VIRTUAL_ENV -ErrorAction SilentlyContinue
    }

    if (!$NonDestructive) {
        # Self destruct!
        Remove-Item function:deactivate
        Remove-Item function:pydoc
    }
}

function global:pydoc {
    python -m pydoc $args
}

# unset irrelevant variables
deactivate -nondestructive

$VIRTUAL_ENV = $BASE_DIR
$env:VIRTUAL_ENV = $VIRTUAL_ENV

New-Variable -Scope global -Name _OLD_VIRTUAL_PATH -Value $env:PATH

$env:PATH = "$env:VIRTUAL_ENV/bin:" + $env:PATH
if (!$env:VIRTUAL_ENV_DISABLE_PROMPT) {
    function global:_old_virtual_prompt {
        ""
    }
    $function:_old_virtual_prompt = $function:prompt

    if ("" -ne "") {
        function global:prompt {
            # Add the custom prefix to the existing prompt
            $previous_prompt_value = & $function:_old_virtual_prompt
            ("" + $previous_prompt_value)
        }
    }
    else {
        function global:prompt {
            # Add a prefix to the current prompt, but don't discard it.
            $previous_prompt_value = & $function:_old_virtual_prompt
            $new_prompt_value = "($( Split-Path $env:VIRTUAL_ENV -Leaf )) "
            ($new_prompt_value + $previous_prompt_value)
        }
    }
}
@@ -1,46 +0,0 @@
"""Xonsh activate script for virtualenv"""
from xonsh.tools import get_sep as _get_sep


def _deactivate(args):
    if "pydoc" in aliases:
        del aliases["pydoc"]

    if ${...}.get("_OLD_VIRTUAL_PATH", ""):
        $PATH = $_OLD_VIRTUAL_PATH
        del $_OLD_VIRTUAL_PATH

    if ${...}.get("_OLD_VIRTUAL_PYTHONHOME", ""):
        $PYTHONHOME = $_OLD_VIRTUAL_PYTHONHOME
        del $_OLD_VIRTUAL_PYTHONHOME

    if "VIRTUAL_ENV" in ${...}:
        del $VIRTUAL_ENV

    if "VIRTUAL_ENV_PROMPT" in ${...}:
        del $VIRTUAL_ENV_PROMPT

    if "nondestructive" not in args:
        # Self destruct!
        del aliases["deactivate"]


# unset irrelevant variables
_deactivate(["nondestructive"])
aliases["deactivate"] = _deactivate

$VIRTUAL_ENV = r"/home/ilya/git/contact_mailer/venv"

$_OLD_VIRTUAL_PATH = $PATH
$PATH = $PATH[:]
$PATH.add($VIRTUAL_ENV + _get_sep() + "bin", front=True, replace=True)

if ${...}.get("PYTHONHOME", ""):
    # unset PYTHONHOME if set
    $_OLD_VIRTUAL_PYTHONHOME = $PYTHONHOME
    del $PYTHONHOME

$VIRTUAL_ENV_PROMPT = ""
if not $VIRTUAL_ENV_PROMPT:
    del $VIRTUAL_ENV_PROMPT

aliases["pydoc"] = ["python", "-m", "pydoc"]
@@ -1,32 +0,0 @@
# -*- coding: utf-8 -*-
"""Activate virtualenv for current interpreter:

Use exec(open(this_file).read(), {'__file__': this_file}).

This can be used when you must use an existing Python interpreter, not the virtualenv bin/python.
"""
import os
import site
import sys

try:
    abs_file = os.path.abspath(__file__)
except NameError:
    raise AssertionError("You must use exec(open(this_file).read(), {'__file__': this_file})")

bin_dir = os.path.dirname(abs_file)
base = bin_dir[: -len("bin") - 1]  # strip away the bin part from the __file__, plus the path separator

# prepend bin to PATH (this file is inside the bin directory)
os.environ["PATH"] = os.pathsep.join([bin_dir] + os.environ.get("PATH", "").split(os.pathsep))
os.environ["VIRTUAL_ENV"] = base  # virtual env is right above bin directory

# add the virtual environment's libraries to the host python import mechanism
prev_length = len(sys.path)
for lib in "../lib/python3.8/site-packages".split(os.pathsep):
    path = os.path.realpath(os.path.join(bin_dir, lib))
    site.addsitedir(path.decode("utf-8") if "" else path)
sys.path[:] = sys.path[prev_length:] + sys.path[0:prev_length]

sys.real_prefix = sys.prefix
sys.prefix = base
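As the docstring above says, this file is exec'd by an already-running interpreter rather than imported. A hedged sketch of that usage from a shell, where the relative path is an assumption about this repo's layout:

```sh
# Activate the venv for whatever interpreter "python" currently is,
# by exec'ing the venv's activate_this.py inside it (path assumed).
python - <<'EOF'
activator = "venv/bin/activate_this.py"
exec(open(activator).read(), {"__file__": activator})
EOF
```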
File diff suppressed because it is too large
@@ -1,8 +0,0 @@
#!/home/ilya/git/contact_mailer/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from dotenv.cli import cli
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(cli())
@@ -1,8 +0,0 @@
#!/home/ilya/git/contact_mailer/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,8 +0,0 @@
#!/home/ilya/git/contact_mailer/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,8 +0,0 @@
#!/home/ilya/git/contact_mailer/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,8 +0,0 @@
#!/home/ilya/git/contact_mailer/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,8 +0,0 @@
#!/home/ilya/git/contact_mailer/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,8 +0,0 @@
#!/home/ilya/git/contact_mailer/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,8 +0,0 @@
#!/home/ilya/git/contact_mailer/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1 +0,0 @@
/usr/bin/python
@@ -1 +0,0 @@
python
@@ -1 +0,0 @@
python
@@ -1,8 +0,0 @@
#!/home/ilya/git/contact_mailer/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from wheel.cli import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,8 +0,0 @@
#!/home/ilya/git/contact_mailer/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from wheel.cli import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,8 +0,0 @@
#!/home/ilya/git/contact_mailer/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from wheel.cli import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
Binary file not shown.
Binary file not shown.
@@ -1 +0,0 @@
import _virtualenv
@@ -1,115 +0,0 @@
"""Patches that are applied at runtime to the virtual environment"""
# -*- coding: utf-8 -*-

import os
import sys

VIRTUALENV_PATCH_FILE = os.path.join(__file__)


def patch_dist(dist):
    """
    Distutils allows users to configure some arguments via a configuration file:
    https://docs.python.org/3/install/index.html#distutils-configuration-files

    Some of these arguments, though, don't make sense in the context of the virtual environment files; let's fix them up.
    """
    # we cannot allow some install config as that would get packages installed outside of the virtual environment
    old_parse_config_files = dist.Distribution.parse_config_files

    def parse_config_files(self, *args, **kwargs):
        result = old_parse_config_files(self, *args, **kwargs)
        install = self.get_option_dict("install")

        if "prefix" in install:  # the prefix governs where to install the libraries
            install["prefix"] = VIRTUALENV_PATCH_FILE, os.path.abspath(sys.prefix)
        for base in ("purelib", "platlib", "headers", "scripts", "data"):
            key = "install_{}".format(base)
            if key in install:  # do not allow global configs to hijack venv paths
                install.pop(key, None)
        return result

    dist.Distribution.parse_config_files = parse_config_files


# Import hook that patches some modules to ignore configuration values that break package installation in case
# of virtual environments.
_DISTUTILS_PATCH = "distutils.dist", "setuptools.dist"
if sys.version_info > (3, 4):
    # https://docs.python.org/3/library/importlib.html#setting-up-an-importer
    from importlib.abc import MetaPathFinder
    from importlib.util import find_spec
    from threading import Lock
    from functools import partial

    class _Finder(MetaPathFinder):
        """A meta path finder that allows patching the imported distutils modules"""

        fullname = None
        lock = Lock()

        def find_spec(self, fullname, path, target=None):
            if fullname in _DISTUTILS_PATCH and self.fullname is None:
                with self.lock:
                    self.fullname = fullname
                    try:
                        spec = find_spec(fullname, path)
                        if spec is not None:
                            # https://www.python.org/dev/peps/pep-0451/#how-loading-will-work
                            is_new_api = hasattr(spec.loader, "exec_module")
                            func_name = "exec_module" if is_new_api else "load_module"
                            old = getattr(spec.loader, func_name)
                            func = self.exec_module if is_new_api else self.load_module
                            if old is not func:
                                try:
                                    setattr(spec.loader, func_name, partial(func, old))
                                except AttributeError:
                                    pass  # C-Extension loaders are r/o such as zipimporter with <python 3.7
                            return spec
                    finally:
                        self.fullname = None

        @staticmethod
        def exec_module(old, module):
            old(module)
            if module.__name__ in _DISTUTILS_PATCH:
                patch_dist(module)

        @staticmethod
        def load_module(old, name):
            module = old(name)
            if module.__name__ in _DISTUTILS_PATCH:
                patch_dist(module)
            return module

    sys.meta_path.insert(0, _Finder())
else:
    # https://www.python.org/dev/peps/pep-0302/
    from imp import find_module
    from pkgutil import ImpImporter, ImpLoader

    class _VirtualenvImporter(object, ImpImporter):
        def __init__(self, path=None):
            object.__init__(self)
            ImpImporter.__init__(self, path)

        def find_module(self, fullname, path=None):
            if fullname in _DISTUTILS_PATCH:
                try:
                    return _VirtualenvLoader(fullname, *find_module(fullname.split(".")[-1], path))
                except ImportError:
                    pass
            return None

    class _VirtualenvLoader(object, ImpLoader):
        def __init__(self, fullname, file, filename, etc):
            object.__init__(self)
            ImpLoader.__init__(self, fullname, file, filename, etc)

        def load_module(self, fullname):
            module = super(_VirtualenvLoader, self).load_module(fullname)
            patch_dist(module)
            module.__loader__ = None  # distlib fallback
            return module

    sys.meta_path.append(_VirtualenvImporter())
@@ -1,64 +0,0 @@
Bottle is written and maintained by Marcel Hellkamp <marc@bottlepy.org>.

Thanks to all the people who found bugs, sent patches, spread the word, helped each other on the mailing-list and made this project possible. I hope the following (alphabetically sorted) list is complete. If you miss your name on that list (or want your name removed) please :doc:`tell me <contact>` or add it yourself.

* acasajus
* Adam R. Smith
* Alexey Borzenkov
* Alexis Daboville
* Anton I. Sipos
* Anton Kolechkin
* apexi200sx
* apheage
* BillMa
* Brad Greenlee
* Brandon Gilmore
* Branko Vukelic
* Brian Sierakowski
* Brian Wickman
* Carl Scharenberg
* Damien Degois
* David Buxton
* Duane Johnson
* fcamel
* Frank Murphy
* Frederic Junod
* goldfaber3012
* Greg Milby
* gstein
* Ian Davis
* Itamar Nabriski
* Iuri de Silvio
* Jaimie Murdock
* Jeff Nichols
* Jeremy Kelley
* joegester
* Johannes Krampf
* Jonas Haag
* Joshua Roesslein
* Karl
* Kevin Zuber
* Kraken
* Kyle Fritz
* m35
* Marcos Neves
* masklinn
* Michael Labbe
* Michael Soulier
* `reddit <http://reddit.com/r/python>`_
* Nicolas Vanhoren
* Robert Rollins
* rogererens
* rwxrwx
* Santiago Gala
* Sean M. Collins
* Sebastian Wollrath
* Seth
* Sigurd Høgsbro
* Stuart Rackham
* Sun Ning
* Tomás A. Schertel
* Tristan Zajonc
* voltron
* Wieland Hoffmann
* zombat
|
||||
pip
|
@@ -1,19 +0,0 @@
Copyright (c) 2012, Marcel Hellkamp.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@@ -1,43 +0,0 @@
Metadata-Version: 2.1
Name: bottle
Version: 0.12.18
Summary: Fast and simple WSGI-framework for small web-applications.
Home-page: http://bottlepy.org/
Author: Marcel Hellkamp
Author-email: marc@gsites.de
License: MIT
Platform: any
Classifier: Development Status :: 4 - Beta
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries
Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Server
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
Classifier: Programming Language :: Python :: 2.5
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.2
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7

Bottle is a fast and simple micro-framework for small web applications. It
offers request dispatching (Routes) with url parameter support, templates,
a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and
template engines - all in a single file and with no dependencies other than the
Python Standard Library.

Homepage and documentation: http://bottlepy.org/

Copyright (c) 2016, Marcel Hellkamp.
License: MIT (see LICENSE for details)
@@ -1,11 +0,0 @@
../../../bin/__pycache__/bottle.cpython-38.pyc,,
../../../bin/bottle.py,sha256=LRyG3HZAhbM_W3aktpqm_o7BMawyg95jcWPzjUhK9EY,150567
__pycache__/bottle.cpython-38.pyc,,
bottle-0.12.18.dist-info/AUTHORS,sha256=A0Y_uWygTzQczXdwcMI8h6XqqWns2pGsJnZOGwu_IPo,1308
bottle-0.12.18.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
bottle-0.12.18.dist-info/LICENSE,sha256=0OchHxw8GhxW850YvLB_J_SAyKlVJhd1bdo6M1kzuKY,1061
bottle-0.12.18.dist-info/METADATA,sha256=l7EXmdbmO7IVvHC4s9PZDZ6S6w8iHeJPOjTcDJVz0c0,1794
bottle-0.12.18.dist-info/RECORD,,
bottle-0.12.18.dist-info/WHEEL,sha256=p46_5Uhzqz6AzeSosiOnxK-zmFja1i22CrQCjmYe8ec,92
bottle-0.12.18.dist-info/top_level.txt,sha256=cK8mpC1WUvVJAVL1XsjCoCGkD-0Yc-pcrqfH0fRXkhg,7
bottle.py,sha256=mDSCc0MX7-XwbF1VuZ1N58O3pjiu_lRpopf6d9ONX-g,150580
@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.33.6)
Root-Is-Purelib: true
Tag: py3-none-any
@@ -1 +0,0 @@
bottle
File diff suppressed because it is too large
File diff suppressed because it is too large
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,245 +0,0 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from uuid import UUID

from bson.py3compat import PY3

"""Tools for representing BSON binary data.
"""

BINARY_SUBTYPE = 0
"""BSON binary subtype for binary data.

This is the default subtype for binary data.
"""

FUNCTION_SUBTYPE = 1
"""BSON binary subtype for functions.
"""

OLD_BINARY_SUBTYPE = 2
"""Old BSON binary subtype for binary data.

This is the old default subtype, the current
default is :data:`BINARY_SUBTYPE`.
"""

OLD_UUID_SUBTYPE = 3
"""Old BSON binary subtype for a UUID.

:class:`uuid.UUID` instances will automatically be encoded
by :mod:`bson` using this subtype.

.. versionadded:: 2.1
"""

UUID_SUBTYPE = 4
"""BSON binary subtype for a UUID.

This is the new BSON binary subtype for UUIDs. The
current default is :data:`OLD_UUID_SUBTYPE`.

.. versionchanged:: 2.1
   Changed to subtype 4.
"""

STANDARD = UUID_SUBTYPE
"""The standard UUID representation.

:class:`uuid.UUID` instances will automatically be encoded to
and decoded from BSON binary, using RFC-4122 byte order with
binary subtype :data:`UUID_SUBTYPE`.

.. versionadded:: 3.0
"""

PYTHON_LEGACY = OLD_UUID_SUBTYPE
"""The Python legacy UUID representation.

:class:`uuid.UUID` instances will automatically be encoded to
and decoded from BSON binary, using RFC-4122 byte order with
binary subtype :data:`OLD_UUID_SUBTYPE`.

.. versionadded:: 3.0
"""

JAVA_LEGACY = 5
"""The Java legacy UUID representation.

:class:`uuid.UUID` instances will automatically be encoded to
and decoded from BSON binary subtype :data:`OLD_UUID_SUBTYPE`,
using the Java driver's legacy byte order.

.. versionchanged:: 3.6
   BSON binary subtype 4 is decoded using RFC-4122 byte order.
.. versionadded:: 2.3
"""

CSHARP_LEGACY = 6
"""The C#/.net legacy UUID representation.

:class:`uuid.UUID` instances will automatically be encoded to
and decoded from BSON binary subtype :data:`OLD_UUID_SUBTYPE`,
using the C# driver's legacy byte order.

.. versionchanged:: 3.6
   BSON binary subtype 4 is decoded using RFC-4122 byte order.
.. versionadded:: 2.3
"""

ALL_UUID_SUBTYPES = (OLD_UUID_SUBTYPE, UUID_SUBTYPE)
ALL_UUID_REPRESENTATIONS = (STANDARD, PYTHON_LEGACY, JAVA_LEGACY, CSHARP_LEGACY)
UUID_REPRESENTATION_NAMES = {
    PYTHON_LEGACY: 'PYTHON_LEGACY',
    STANDARD: 'STANDARD',
    JAVA_LEGACY: 'JAVA_LEGACY',
    CSHARP_LEGACY: 'CSHARP_LEGACY'}

MD5_SUBTYPE = 5
"""BSON binary subtype for an MD5 hash.
"""

USER_DEFINED_SUBTYPE = 128
"""BSON binary subtype for any user defined structure.
"""


class Binary(bytes):
    """Representation of BSON binary data.

    This is necessary because we want to represent Python strings as
    the BSON string type. We need to wrap binary data so we can tell
    the difference between what should be considered binary data and
    what should be considered a string when we encode to BSON.

    Raises TypeError if `data` is not an instance of :class:`bytes`
    (:class:`str` in python 2) or `subtype` is not an instance of
    :class:`int`. Raises ValueError if `subtype` is not in [0, 256).

    .. note::
       In python 3 instances of Binary with subtype 0 will be decoded
       directly to :class:`bytes`.

    :Parameters:
      - `data`: the binary data to represent. Can be any bytes-like type
        that implements the buffer protocol.
      - `subtype` (optional): the `binary subtype
        <http://bsonspec.org/#/specification>`_
        to use

    .. versionchanged:: 3.9
       Support any bytes-like type that implements the buffer protocol.
    """

    _type_marker = 5

    def __new__(cls, data, subtype=BINARY_SUBTYPE):
        if not isinstance(subtype, int):
            raise TypeError("subtype must be an instance of int")
        if subtype >= 256 or subtype < 0:
            raise ValueError("subtype must be contained in [0, 256)")
        # Support any type that implements the buffer protocol.
        self = bytes.__new__(cls, memoryview(data).tobytes())
        self.__subtype = subtype
        return self

    @property
    def subtype(self):
        """Subtype of this binary data.
        """
        return self.__subtype

    def __getnewargs__(self):
        # Work around http://bugs.python.org/issue7382
        data = super(Binary, self).__getnewargs__()[0]
        if PY3 and not isinstance(data, bytes):
            data = data.encode('latin-1')
        return data, self.__subtype

    def __eq__(self, other):
        if isinstance(other, Binary):
            return ((self.__subtype, bytes(self)) ==
                    (other.subtype, bytes(other)))
        # We don't return NotImplemented here because if we did then
        # Binary("foo") == "foo" would return True, since Binary is a
        # subclass of str...
        return False

    def __hash__(self):
        return super(Binary, self).__hash__() ^ hash(self.__subtype)

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "Binary(%s, %s)" % (bytes.__repr__(self), self.__subtype)


class UUIDLegacy(Binary):
    """UUID wrapper to support working with UUIDs stored as PYTHON_LEGACY.

    .. doctest::

      >>> import uuid
      >>> from bson.binary import Binary, UUIDLegacy, STANDARD
      >>> from bson.codec_options import CodecOptions
      >>> my_uuid = uuid.uuid4()
      >>> coll = db.get_collection('test',
      ...                          CodecOptions(uuid_representation=STANDARD))
      >>> coll.insert_one({'uuid': Binary(my_uuid.bytes, 3)}).inserted_id
      ObjectId('...')
      >>> coll.count_documents({'uuid': my_uuid})
      0
      >>> coll.count_documents({'uuid': UUIDLegacy(my_uuid)})
      1
      >>> coll.find({'uuid': UUIDLegacy(my_uuid)})[0]['uuid']
      UUID('...')
      >>>
      >>> # Convert from subtype 3 to subtype 4
      >>> doc = coll.find_one({'uuid': UUIDLegacy(my_uuid)})
      >>> coll.replace_one({"_id": doc["_id"]}, doc).matched_count
      1
      >>> coll.count_documents({'uuid': UUIDLegacy(my_uuid)})
      0
      >>> coll.count_documents({'uuid': {'$in': [UUIDLegacy(my_uuid), my_uuid]}})
      1
      >>> coll.find_one({'uuid': my_uuid})['uuid']
      UUID('...')

    Raises TypeError if `obj` is not an instance of :class:`~uuid.UUID`.

    :Parameters:
      - `obj`: An instance of :class:`~uuid.UUID`.
    """

    def __new__(cls, obj):
        if not isinstance(obj, UUID):
            raise TypeError("obj must be an instance of uuid.UUID")
        self = Binary.__new__(cls, obj.bytes, OLD_UUID_SUBTYPE)
        self.__uuid = obj
        return self

    def __getnewargs__(self):
        # Support copy and deepcopy
        return (self.__uuid,)

    @property
    def uuid(self):
        """UUID instance wrapped by this UUIDLegacy instance.
        """
        return self.__uuid

    def __repr__(self):
        return "UUIDLegacy('%s')" % self.__uuid
@@ -1,99 +0,0 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tools for representing JavaScript code in BSON.
"""

from bson.py3compat import abc, string_type, PY3, text_type


class Code(str):
    """BSON's JavaScript code type.

    Raises :class:`TypeError` if `code` is not an instance of
    :class:`basestring` (:class:`str` in python 3) or `scope`
    is not ``None`` or an instance of :class:`dict`.

    Scope variables can be set by passing a dictionary as the `scope`
    argument or by using keyword arguments. If a variable is set as a
    keyword argument it will override any setting for that variable in
    the `scope` dictionary.

    :Parameters:
      - `code`: A string containing JavaScript code to be evaluated or another
        instance of Code. In the latter case, the scope of `code` becomes this
        Code's :attr:`scope`.
      - `scope` (optional): dictionary representing the scope in which
        `code` should be evaluated - a mapping from identifiers (as
        strings) to values. Defaults to ``None``. This is applied after any
        scope associated with a given `code` above.
      - `**kwargs` (optional): scope variables can also be passed as
        keyword arguments. These are applied after `scope` and `code`.

    .. versionchanged:: 3.4
       The default value for :attr:`scope` is ``None`` instead of ``{}``.

    """

    _type_marker = 13

    def __new__(cls, code, scope=None, **kwargs):
        if not isinstance(code, string_type):
            raise TypeError("code must be an "
                            "instance of %s" % (string_type.__name__))

        if not PY3 and isinstance(code, text_type):
            self = str.__new__(cls, code.encode('utf8'))
        else:
            self = str.__new__(cls, code)

        try:
            self.__scope = code.scope
        except AttributeError:
            self.__scope = None

        if scope is not None:
            if not isinstance(scope, abc.Mapping):
                raise TypeError("scope must be an instance of dict")
            if self.__scope is not None:
                self.__scope.update(scope)
            else:
                self.__scope = scope

        if kwargs:
            if self.__scope is not None:
                self.__scope.update(kwargs)
            else:
                self.__scope = kwargs

        return self

    @property
    def scope(self):
        """Scope dictionary for this instance or ``None``.
        """
        return self.__scope

    def __repr__(self):
        return "Code(%s, %r)" % (str.__repr__(self), self.__scope)

    def __eq__(self, other):
        if isinstance(other, Code):
            return (self.__scope, str(self)) == (other.__scope, str(other))
        return False

    __hash__ = None

    def __ne__(self, other):
        return not self == other
@@ -1,334 +0,0 @@
# Copyright 2014-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tools for specifying BSON codec options."""

import datetime

from abc import abstractmethod
from collections import namedtuple

from bson.py3compat import ABC, abc, abstractproperty, string_type

from bson.binary import (ALL_UUID_REPRESENTATIONS,
                         PYTHON_LEGACY,
                         UUID_REPRESENTATION_NAMES)


_RAW_BSON_DOCUMENT_MARKER = 101


def _raw_document_class(document_class):
    """Determine if a document_class is a RawBSONDocument class."""
    marker = getattr(document_class, '_type_marker', None)
    return marker == _RAW_BSON_DOCUMENT_MARKER


class TypeEncoder(ABC):
    """Base class for defining type codec classes which describe how a
    custom type can be transformed to one of the types BSON understands.

    Codec classes must implement the ``python_type`` attribute, and the
    ``transform_python`` method to support encoding.

    See :ref:`custom-type-type-codec` documentation for an example.
    """
    @abstractproperty
    def python_type(self):
        """The Python type to be converted into something serializable."""
        pass

    @abstractmethod
    def transform_python(self, value):
        """Convert the given Python object into something serializable."""
        pass


class TypeDecoder(ABC):
    """Base class for defining type codec classes which describe how a
    BSON type can be transformed to a custom type.

    Codec classes must implement the ``bson_type`` attribute, and the
    ``transform_bson`` method to support decoding.

    See :ref:`custom-type-type-codec` documentation for an example.
    """
    @abstractproperty
    def bson_type(self):
        """The BSON type to be converted into our own type."""
        pass

    @abstractmethod
    def transform_bson(self, value):
        """Convert the given BSON value into our own type."""
        pass


class TypeCodec(TypeEncoder, TypeDecoder):
    """Base class for defining type codec classes which describe how a
    custom type can be transformed to/from one of the types :mod:`bson`
    can already encode/decode.

    Codec classes must implement the ``python_type`` attribute, and the
    ``transform_python`` method to support encoding, as well as the
    ``bson_type`` attribute, and the ``transform_bson`` method to support
    decoding.

    See :ref:`custom-type-type-codec` documentation for an example.
    """
    pass


class TypeRegistry(object):
    """Encapsulates type codecs used in encoding and / or decoding BSON, as
    well as the fallback encoder. Type registries cannot be modified after
    instantiation.

    ``TypeRegistry`` can be initialized with an iterable of type codecs, and
    a callable for the fallback encoder::

      >>> from bson.codec_options import TypeRegistry
      >>> type_registry = TypeRegistry([Codec1, Codec2, Codec3, ...],
      ...                              fallback_encoder)

    See :ref:`custom-type-type-registry` documentation for an example.

    :Parameters:
      - `type_codecs` (optional): iterable of type codec instances. If
        ``type_codecs`` contains multiple codecs that transform a single
        python or BSON type, the transformation specified by the type codec
        occurring last prevails. A TypeError will be raised if one or more
        type codecs modify the encoding behavior of a built-in :mod:`bson`
        type.
      - `fallback_encoder` (optional): callable that accepts a single,
        unencodable python value and transforms it into a type that
        :mod:`bson` can encode. See :ref:`fallback-encoder-callable`
        documentation for an example.
    """
    def __init__(self, type_codecs=None, fallback_encoder=None):
        self.__type_codecs = list(type_codecs or [])
        self._fallback_encoder = fallback_encoder
        self._encoder_map = {}
        self._decoder_map = {}

        if self._fallback_encoder is not None:
            if not callable(fallback_encoder):
                raise TypeError("fallback_encoder %r is not a callable" % (
                    fallback_encoder))

        for codec in self.__type_codecs:
            is_valid_codec = False
            if isinstance(codec, TypeEncoder):
                self._validate_type_encoder(codec)
                is_valid_codec = True
                self._encoder_map[codec.python_type] = codec.transform_python
            if isinstance(codec, TypeDecoder):
                is_valid_codec = True
                self._decoder_map[codec.bson_type] = codec.transform_bson
            if not is_valid_codec:
                raise TypeError(
                    "Expected an instance of %s, %s, or %s, got %r instead" % (
                        TypeEncoder.__name__, TypeDecoder.__name__,
                        TypeCodec.__name__, codec))

    def _validate_type_encoder(self, codec):
        from bson import _BUILT_IN_TYPES
        for pytype in _BUILT_IN_TYPES:
            if issubclass(codec.python_type, pytype):
                err_msg = ("TypeEncoders cannot change how built-in types are "
                           "encoded (encoder %s transforms type %s)" %
                           (codec, pytype))
                raise TypeError(err_msg)

    def __repr__(self):
        return ('%s(type_codecs=%r, fallback_encoder=%r)' % (
            self.__class__.__name__, self.__type_codecs,
            self._fallback_encoder))

    def __eq__(self, other):
        if not isinstance(other, type(self)):
            return NotImplemented
        return ((self._decoder_map == other._decoder_map) and
                (self._encoder_map == other._encoder_map) and
                (self._fallback_encoder == other._fallback_encoder))


_options_base = namedtuple(
    'CodecOptions',
    ('document_class', 'tz_aware', 'uuid_representation',
     'unicode_decode_error_handler', 'tzinfo', 'type_registry'))


class CodecOptions(_options_base):
    """Encapsulates options used encoding and / or decoding BSON.

    The `document_class` option is used to define a custom type for use
    decoding BSON documents. Access to the underlying raw BSON bytes for
    a document is available using the :class:`~bson.raw_bson.RawBSONDocument`
    type::

      >>> from bson.raw_bson import RawBSONDocument
      >>> from bson.codec_options import CodecOptions
      >>> codec_options = CodecOptions(document_class=RawBSONDocument)
      >>> coll = db.get_collection('test', codec_options=codec_options)
      >>> doc = coll.find_one()
      >>> doc.raw
      '\\x16\\x00\\x00\\x00\\x07_id\\x00[0\\x165\\x91\\x10\\xea\\x14\\xe8\\xc5\\x8b\\x93\\x00'

    The document class can be any type that inherits from
    :class:`~collections.MutableMapping`::

      >>> class AttributeDict(dict):
      ...     # A dict that supports attribute access.
      ...     def __getattr__(self, key):
      ...         return self[key]
      ...     def __setattr__(self, key, value):
      ...         self[key] = value
      ...
      >>> codec_options = CodecOptions(document_class=AttributeDict)
      >>> coll = db.get_collection('test', codec_options=codec_options)
      >>> doc = coll.find_one()
      >>> doc._id
      ObjectId('5b3016359110ea14e8c58b93')

    See :doc:`/examples/datetimes` for examples using the `tz_aware` and
    `tzinfo` options.

    See :class:`~bson.binary.UUIDLegacy` for examples using the
    `uuid_representation` option.

    :Parameters:
      - `document_class`: BSON documents returned in queries will be decoded
        to an instance of this class. Must be a subclass of
        :class:`~collections.MutableMapping`. Defaults to :class:`dict`.
      - `tz_aware`: If ``True``, BSON datetimes will be decoded to timezone
        aware instances of :class:`~datetime.datetime`. Otherwise they will be
        naive. Defaults to ``False``.
      - `uuid_representation`: The BSON representation to use when encoding
        and decoding instances of :class:`~uuid.UUID`. Defaults to
        :data:`~bson.binary.PYTHON_LEGACY`.
      - `unicode_decode_error_handler`: The error handler to apply when
        a Unicode-related error occurs during BSON decoding that would
        otherwise raise :exc:`UnicodeDecodeError`. Valid options include
        'strict', 'replace', and 'ignore'. Defaults to 'strict'.
      - `tzinfo`: A :class:`~datetime.tzinfo` subclass that specifies the
        timezone to/from which :class:`~datetime.datetime` objects should be
        encoded/decoded.
      - `type_registry`: Instance of :class:`TypeRegistry` used to customize
        encoding and decoding behavior.

    .. versionadded:: 3.8
       `type_registry` attribute.

    .. warning:: Care must be taken when changing
       `unicode_decode_error_handler` from its default value ('strict').
       The 'replace' and 'ignore' modes should not be used when documents
       retrieved from the server will be modified in the client application
       and stored back to the server.
    """

    def __new__(cls, document_class=dict,
                tz_aware=False, uuid_representation=PYTHON_LEGACY,
                unicode_decode_error_handler="strict",
                tzinfo=None, type_registry=None):
        if not (issubclass(document_class, abc.MutableMapping) or
                _raw_document_class(document_class)):
            raise TypeError("document_class must be dict, bson.son.SON, "
                            "bson.raw_bson.RawBSONDocument, or a "
                            "subclass of collections.MutableMapping")
        if not isinstance(tz_aware, bool):
            raise TypeError("tz_aware must be True or False")
        if uuid_representation not in ALL_UUID_REPRESENTATIONS:
            raise ValueError("uuid_representation must be a value "
                             "from bson.binary.ALL_UUID_REPRESENTATIONS")
        # Accept a string or None. (Putting None inside the isinstance()
        # tuple would raise TypeError for non-string handlers.)
        if (unicode_decode_error_handler is not None and
                not isinstance(unicode_decode_error_handler, string_type)):
            raise ValueError("unicode_decode_error_handler must be a string "
                             "or None")
        if tzinfo is not None:
            if not isinstance(tzinfo, datetime.tzinfo):
                raise TypeError(
                    "tzinfo must be an instance of datetime.tzinfo")
            if not tz_aware:
                raise ValueError(
                    "cannot specify tzinfo without also setting tz_aware=True")

        type_registry = type_registry or TypeRegistry()

        if not isinstance(type_registry, TypeRegistry):
            raise TypeError("type_registry must be an instance of TypeRegistry")

        return tuple.__new__(
            cls, (document_class, tz_aware, uuid_representation,
                  unicode_decode_error_handler, tzinfo, type_registry))

    def _arguments_repr(self):
        """Representation of the arguments used to create this object."""
        document_class_repr = (
            'dict' if self.document_class is dict
            else repr(self.document_class))

        uuid_rep_repr = UUID_REPRESENTATION_NAMES.get(self.uuid_representation,
                                                      self.uuid_representation)

        return ('document_class=%s, tz_aware=%r, uuid_representation=%s, '
                'unicode_decode_error_handler=%r, tzinfo=%r, '
                'type_registry=%r' %
                (document_class_repr, self.tz_aware, uuid_rep_repr,
                 self.unicode_decode_error_handler, self.tzinfo,
                 self.type_registry))

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self._arguments_repr())

    def with_options(self, **kwargs):
        """Make a copy of this CodecOptions, overriding some options::

            >>> from bson.codec_options import DEFAULT_CODEC_OPTIONS
            >>> DEFAULT_CODEC_OPTIONS.tz_aware
            False
            >>> options = DEFAULT_CODEC_OPTIONS.with_options(tz_aware=True)
            >>> options.tz_aware
            True

        .. versionadded:: 3.5
        """
        return CodecOptions(
            kwargs.get('document_class', self.document_class),
            kwargs.get('tz_aware', self.tz_aware),
            kwargs.get('uuid_representation', self.uuid_representation),
            kwargs.get('unicode_decode_error_handler',
                       self.unicode_decode_error_handler),
            kwargs.get('tzinfo', self.tzinfo),
            kwargs.get('type_registry', self.type_registry)
        )


DEFAULT_CODEC_OPTIONS = CodecOptions()


def _parse_codec_options(options):
    """Parse BSON codec options."""
    return CodecOptions(
        document_class=options.get(
            'document_class', DEFAULT_CODEC_OPTIONS.document_class),
        tz_aware=options.get(
            'tz_aware', DEFAULT_CODEC_OPTIONS.tz_aware),
        uuid_representation=options.get(
            'uuidrepresentation', DEFAULT_CODEC_OPTIONS.uuid_representation),
        unicode_decode_error_handler=options.get(
            'unicode_decode_error_handler',
            DEFAULT_CODEC_OPTIONS.unicode_decode_error_handler),
        tzinfo=options.get('tzinfo', DEFAULT_CODEC_OPTIONS.tzinfo),
        type_registry=options.get(
            'type_registry', DEFAULT_CODEC_OPTIONS.type_registry))
@ -1,135 +0,0 @@
|
||||
# Copyright 2009-2015 MongoDB, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tools for manipulating DBRefs (references to MongoDB documents)."""

from copy import deepcopy

from bson.py3compat import iteritems, string_type
from bson.son import SON


class DBRef(object):
    """A reference to a document stored in MongoDB.
    """

    # DBRef isn't actually a BSON "type" so this number was arbitrarily chosen.
    _type_marker = 100

    def __init__(self, collection, id, database=None, _extra={}, **kwargs):
        """Initialize a new :class:`DBRef`.

        Raises :class:`TypeError` if `collection` or `database` is not
        an instance of :class:`basestring` (:class:`str` in python 3).
        `database` is optional and allows references to documents to work
        across databases. Any additional keyword arguments will create
        additional fields in the resultant embedded document.

        :Parameters:
          - `collection`: name of the collection the document is stored in
          - `id`: the value of the document's ``"_id"`` field
          - `database` (optional): name of the database to reference
          - `**kwargs` (optional): additional keyword arguments will
            create additional, custom fields

        .. mongodoc:: dbrefs
        """
        if not isinstance(collection, string_type):
            raise TypeError("collection must be an "
                            "instance of %s" % string_type.__name__)
        if database is not None and not isinstance(database, string_type):
            raise TypeError("database must be an "
                            "instance of %s" % string_type.__name__)

        self.__collection = collection
        self.__id = id
        self.__database = database
        kwargs.update(_extra)
        self.__kwargs = kwargs

    @property
    def collection(self):
        """Get the name of this DBRef's collection as unicode.
        """
        return self.__collection

    @property
    def id(self):
        """Get this DBRef's _id.
        """
        return self.__id

    @property
    def database(self):
        """Get the name of this DBRef's database.

        Returns None if this DBRef doesn't specify a database.
        """
        return self.__database

    def __getattr__(self, key):
        try:
            return self.__kwargs[key]
        except KeyError:
            raise AttributeError(key)

    # Have to provide __setstate__ to avoid
    # infinite recursion since we override
    # __getattr__.
    def __setstate__(self, state):
        self.__dict__.update(state)

    def as_doc(self):
        """Get the SON document representation of this DBRef.

        Generally not needed by application developers
        """
        doc = SON([("$ref", self.collection),
                   ("$id", self.id)])
        if self.database is not None:
            doc["$db"] = self.database
        doc.update(self.__kwargs)
        return doc

    def __repr__(self):
        extra = "".join([", %s=%r" % (k, v)
                         for k, v in iteritems(self.__kwargs)])
        if self.database is None:
            return "DBRef(%r, %r%s)" % (self.collection, self.id, extra)
        return "DBRef(%r, %r, %r%s)" % (self.collection, self.id,
                                        self.database, extra)

    def __eq__(self, other):
        if isinstance(other, DBRef):
            us = (self.__database, self.__collection,
                  self.__id, self.__kwargs)
            them = (other.__database, other.__collection,
                    other.__id, other.__kwargs)
            return us == them
        return NotImplemented

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        """Get a hash value for this :class:`DBRef`."""
        return hash((self.__collection, self.__id, self.__database,
                     tuple(sorted(self.__kwargs.items()))))

    def __deepcopy__(self, memo):
        """Support function for `copy.deepcopy()`."""
        return DBRef(deepcopy(self.__collection, memo),
                     deepcopy(self.__id, memo),
                     deepcopy(self.__database, memo),
                     deepcopy(self.__kwargs, memo))
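A short usage sketch for the class above (illustrative only; it assumes the
vendored bson package is importable, and the field names are hypothetical):

from bson.dbref import DBRef

# Reference the document with _id=42 in the "users" collection of the
# "accounts" database; extra keyword arguments become custom fields.
ref = DBRef("users", 42, database="accounts", note="hypothetical field")
assert ref.collection == "users" and ref.id == 42
# as_doc() returns the embedded-document form, with key order preserved:
# SON([('$ref', 'users'), ('$id', 42), ('$db', 'accounts'), ('note', ...)])
print(ref.as_doc())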
@ -1,335 +0,0 @@
# Copyright 2016-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tools for working with the BSON decimal128 type.

.. versionadded:: 3.4

.. note:: The Decimal128 BSON type requires MongoDB 3.4+.
"""

import decimal
import struct
import sys

from bson.py3compat import (PY3 as _PY3,
                            string_type as _string_type)


if _PY3:
    _from_bytes = int.from_bytes  # pylint: disable=no-member, invalid-name
else:
    import binascii
    def _from_bytes(value, dummy, _int=int, _hexlify=binascii.hexlify):
        "An implementation of int.from_bytes for python 2.x."
        return _int(_hexlify(value), 16)


_PACK_64 = struct.Struct("<Q").pack
_UNPACK_64 = struct.Struct("<Q").unpack

_EXPONENT_MASK = 3 << 61
_EXPONENT_BIAS = 6176
_EXPONENT_MAX = 6144
_EXPONENT_MIN = -6143
_MAX_DIGITS = 34

_INF = 0x7800000000000000
_NAN = 0x7c00000000000000
_SNAN = 0x7e00000000000000
_SIGN = 0x8000000000000000

_NINF = (_INF + _SIGN, 0)
_PINF = (_INF, 0)
_NNAN = (_NAN + _SIGN, 0)
_PNAN = (_NAN, 0)
_NSNAN = (_SNAN + _SIGN, 0)
_PSNAN = (_SNAN, 0)

_CTX_OPTIONS = {
    'prec': _MAX_DIGITS,
    'rounding': decimal.ROUND_HALF_EVEN,
    'Emin': _EXPONENT_MIN,
    'Emax': _EXPONENT_MAX,
    'capitals': 1,
    'flags': [],
    'traps': [decimal.InvalidOperation,
              decimal.Overflow,
              decimal.Inexact]
}

try:
    # Python >= 3.3, cdecimal
    decimal.Context(clamp=1)  # pylint: disable=unexpected-keyword-arg
    _CTX_OPTIONS['clamp'] = 1
except TypeError:
    # Python < 3.3
    _CTX_OPTIONS['_clamp'] = 1

_DEC128_CTX = decimal.Context(**_CTX_OPTIONS.copy())


def create_decimal128_context():
    """Returns an instance of :class:`decimal.Context` appropriate
    for working with IEEE-754 128-bit decimal floating point values.
    """
    opts = _CTX_OPTIONS.copy()
    opts['traps'] = []
    return decimal.Context(**opts)


def _decimal_to_128(value):
    """Converts a decimal.Decimal to BID (high bits, low bits).

    :Parameters:
      - `value`: An instance of decimal.Decimal
    """
    with decimal.localcontext(_DEC128_CTX) as ctx:
        value = ctx.create_decimal(value)

    if value.is_infinite():
        return _NINF if value.is_signed() else _PINF

    sign, digits, exponent = value.as_tuple()

    if value.is_nan():
        if digits:
            raise ValueError("NaN with debug payload is not supported")
        if value.is_snan():
            return _NSNAN if value.is_signed() else _PSNAN
        return _NNAN if value.is_signed() else _PNAN

    significand = int("".join([str(digit) for digit in digits]))
    bit_length = significand.bit_length()

    high = 0
    low = 0
    for i in range(min(64, bit_length)):
        if significand & (1 << i):
            low |= 1 << i

    for i in range(64, bit_length):
        if significand & (1 << i):
            high |= 1 << (i - 64)

    biased_exponent = exponent + _EXPONENT_BIAS

    if high >> 49 == 1:
        high = high & 0x7fffffffffff
        high |= _EXPONENT_MASK
        high |= (biased_exponent & 0x3fff) << 47
    else:
        high |= biased_exponent << 49

    if sign:
        high |= _SIGN

    return high, low
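A quick check of the helper above, grounded in the Decimal128 doctest below
(an illustrative sketch; `_decimal_to_128` is a private helper of this module):

from decimal import Decimal
from bson.decimal128 import _decimal_to_128

# 0.0005 packs its significand into the low word and its biased exponent
# into the high word of the BID encoding:
assert _decimal_to_128(Decimal("0.0005")) == (3474527112516337664, 5)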
class Decimal128(object):
    """BSON Decimal128 type::

        >>> Decimal128(Decimal("0.0005"))
        Decimal128('0.0005')
        >>> Decimal128("0.0005")
        Decimal128('0.0005')
        >>> Decimal128((3474527112516337664, 5))
        Decimal128('0.0005')

    :Parameters:
      - `value`: An instance of :class:`decimal.Decimal`, string, or tuple of
        (high bits, low bits) from Binary Integer Decimal (BID) format.

    .. note:: :class:`~Decimal128` uses an instance of :class:`decimal.Context`
      configured for IEEE-754 Decimal128 when validating parameters.
      Signals like :class:`decimal.InvalidOperation`, :class:`decimal.Inexact`,
      and :class:`decimal.Overflow` are trapped and raised as exceptions::

        >>> Decimal128(".13.1")
        Traceback (most recent call last):
          File "<stdin>", line 1, in <module>
        ...
        decimal.InvalidOperation: [<class 'decimal.ConversionSyntax'>]
        >>>
        >>> Decimal128("1E-6177")
        Traceback (most recent call last):
          File "<stdin>", line 1, in <module>
        ...
        decimal.Inexact: [<class 'decimal.Inexact'>]
        >>>
        >>> Decimal128("1E6145")
        Traceback (most recent call last):
          File "<stdin>", line 1, in <module>
        ...
        decimal.Overflow: [<class 'decimal.Overflow'>, <class 'decimal.Rounded'>]

      To ensure the result of a calculation can always be stored as BSON
      Decimal128 use the context returned by
      :func:`create_decimal128_context`::

        >>> import decimal
        >>> decimal128_ctx = create_decimal128_context()
        >>> with decimal.localcontext(decimal128_ctx) as ctx:
        ...     Decimal128(ctx.create_decimal(".13.3"))
        ...
        Decimal128('NaN')
        >>>
        >>> with decimal.localcontext(decimal128_ctx) as ctx:
        ...     Decimal128(ctx.create_decimal("1E-6177"))
        ...
        Decimal128('0E-6176')
        >>>
        >>> with decimal.localcontext(decimal128_ctx) as ctx:
        ...     Decimal128(ctx.create_decimal("1E6145"))
        ...
        Decimal128('Infinity')

      To match the behavior of MongoDB's Decimal128 implementation,
      str(Decimal(value)) may not match str(Decimal128(value)) for NaN values::

        >>> Decimal128(Decimal('NaN'))
        Decimal128('NaN')
        >>> Decimal128(Decimal('-NaN'))
        Decimal128('NaN')
        >>> Decimal128(Decimal('sNaN'))
        Decimal128('NaN')
        >>> Decimal128(Decimal('-sNaN'))
        Decimal128('NaN')

      However, :meth:`~Decimal128.to_decimal` will return the exact value::

        >>> Decimal128(Decimal('NaN')).to_decimal()
        Decimal('NaN')
        >>> Decimal128(Decimal('-NaN')).to_decimal()
        Decimal('-NaN')
        >>> Decimal128(Decimal('sNaN')).to_decimal()
        Decimal('sNaN')
        >>> Decimal128(Decimal('-sNaN')).to_decimal()
        Decimal('-sNaN')

      Two instances of :class:`Decimal128` compare equal if their Binary
      Integer Decimal encodings are equal::

        >>> Decimal128('NaN') == Decimal128('NaN')
        True
        >>> Decimal128('NaN').bid == Decimal128('NaN').bid
        True

      This differs from :class:`decimal.Decimal` comparisons for NaN::

        >>> Decimal('NaN') == Decimal('NaN')
        False
    """
    __slots__ = ('__high', '__low')

    _type_marker = 19

    def __init__(self, value):
        if isinstance(value, (_string_type, decimal.Decimal)):
            self.__high, self.__low = _decimal_to_128(value)
        elif isinstance(value, (list, tuple)):
            if len(value) != 2:
                raise ValueError('Invalid size for creation of Decimal128 '
                                 'from list or tuple. Must have exactly 2 '
                                 'elements.')
            self.__high, self.__low = value
        else:
            raise TypeError("Cannot convert %r to Decimal128" % (value,))

    def to_decimal(self):
        """Returns an instance of :class:`decimal.Decimal` for this
        :class:`Decimal128`.
        """
        high = self.__high
        low = self.__low
        sign = 1 if (high & _SIGN) else 0

        if (high & _SNAN) == _SNAN:
            return decimal.Decimal((sign, (), 'N'))
        elif (high & _NAN) == _NAN:
            return decimal.Decimal((sign, (), 'n'))
        elif (high & _INF) == _INF:
            return decimal.Decimal((sign, (), 'F'))

        if (high & _EXPONENT_MASK) == _EXPONENT_MASK:
            exponent = ((high & 0x1fffe00000000000) >> 47) - _EXPONENT_BIAS
            return decimal.Decimal((sign, (0,), exponent))
        else:
            exponent = ((high & 0x7fff800000000000) >> 49) - _EXPONENT_BIAS

        arr = bytearray(15)
        mask = 0x00000000000000ff
        for i in range(14, 6, -1):
            arr[i] = (low & mask) >> ((14 - i) << 3)
            mask = mask << 8

        mask = 0x00000000000000ff
        for i in range(6, 0, -1):
            arr[i] = (high & mask) >> ((6 - i) << 3)
            mask = mask << 8

        mask = 0x0001000000000000
        arr[0] = (high & mask) >> 48

        # cdecimal only accepts a tuple for digits.
        digits = tuple(
            int(digit) for digit in str(_from_bytes(arr, 'big')))

        with decimal.localcontext(_DEC128_CTX) as ctx:
            return ctx.create_decimal((sign, digits, exponent))

    @classmethod
    def from_bid(cls, value):
        """Create an instance of :class:`Decimal128` from Binary Integer
        Decimal string.

        :Parameters:
          - `value`: 16 byte string (128-bit IEEE 754-2008 decimal floating
            point in Binary Integer Decimal (BID) format).
        """
        if not isinstance(value, bytes):
            raise TypeError("value must be an instance of bytes")
        if len(value) != 16:
            raise ValueError("value must be exactly 16 bytes")
        return cls((_UNPACK_64(value[8:])[0], _UNPACK_64(value[:8])[0]))

    @property
    def bid(self):
        """The Binary Integer Decimal (BID) encoding of this instance."""
        return _PACK_64(self.__low) + _PACK_64(self.__high)

    def __str__(self):
        dec = self.to_decimal()
        if dec.is_nan():
            # Required by the drivers spec to match MongoDB behavior.
            return "NaN"
        return str(dec)

    def __repr__(self):
        return "Decimal128('%s')" % (str(self),)

    def __setstate__(self, value):
        self.__high, self.__low = value

    def __getstate__(self):
        return self.__high, self.__low

    def __eq__(self, other):
        if isinstance(other, Decimal128):
            return self.bid == other.bid
        return NotImplemented

    def __ne__(self, other):
        return not self == other
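A round-trip sketch for the class above (illustrative only):

from bson.decimal128 import Decimal128

d = Decimal128("0.0005")
raw = d.bid                            # 16 bytes: low 64 bits, then high 64 bits
assert len(raw) == 16
assert Decimal128.from_bid(raw) == d   # equality compares BID encodings
print(d.to_decimal())                  # Decimal('0.0005')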
@ -1,40 +0,0 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Exceptions raised by the BSON package."""


class BSONError(Exception):
    """Base class for all BSON exceptions.
    """


class InvalidBSON(BSONError):
    """Raised when trying to create a BSON object from invalid data.
    """


class InvalidStringData(BSONError):
    """Raised when trying to encode a string containing non-UTF8 data.
    """


class InvalidDocument(BSONError):
    """Raised when trying to create a BSON object from an invalid document.
    """


class InvalidId(BSONError):
    """Raised when trying to create an ObjectId from invalid data.
    """
@ -1,34 +0,0 @@
# Copyright 2014-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""A BSON wrapper for long (int in python3)"""

from bson.py3compat import PY3

if PY3:
    long = int


class Int64(long):
    """Representation of the BSON int64 type.

    This is necessary because every integral number is an :class:`int` in
    Python 3. Small integral numbers are encoded to BSON int32 by default,
    but Int64 numbers will always be encoded to BSON int64.

    :Parameters:
      - `value`: the numeric value to represent
    """

    _type_marker = 18
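A small sketch of the distinction the class above encodes (illustrative only):

from bson.int64 import Int64

n = Int64(1)
assert isinstance(n, int) and n == 1  # behaves exactly like an int in Python 3
# Unlike a plain 1, though, n carries _type_marker = 18, so the BSON encoder
# always emits an int64 for it instead of choosing int32 for small values.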
@ -1,829 +0,0 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tools for using Python's :mod:`json` module with BSON documents.

This module provides two helper methods `dumps` and `loads` that wrap the
native :mod:`json` methods and provide explicit BSON conversion to and from
JSON. :class:`~bson.json_util.JSONOptions` provides a way to control how JSON
is emitted and parsed, with the default being the legacy PyMongo format.
:mod:`~bson.json_util` can also generate Canonical or Relaxed `Extended JSON`_
when :const:`CANONICAL_JSON_OPTIONS` or :const:`RELAXED_JSON_OPTIONS` is
provided, respectively.

.. _Extended JSON: https://github.com/mongodb/specifications/blob/master/source/extended-json.rst

Example usage (deserialization):

.. doctest::

   >>> from bson.json_util import loads
   >>> loads('[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$scope": {}, "$code": "function x() { return 1; }"}}, {"bin": {"$type": "80", "$binary": "AQIDBA=="}}]')
   [{u'foo': [1, 2]}, {u'bar': {u'hello': u'world'}}, {u'code': Code('function x() { return 1; }', {})}, {u'bin': Binary('...', 128)}]

Example usage (serialization):

.. doctest::

   >>> from bson import Binary, Code
   >>> from bson.json_util import dumps
   >>> dumps([{'foo': [1, 2]},
   ...        {'bar': {'hello': 'world'}},
   ...        {'code': Code("function x() { return 1; }", {})},
   ...        {'bin': Binary(b"\x01\x02\x03\x04")}])
   '[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }", "$scope": {}}}, {"bin": {"$binary": "AQIDBA==", "$type": "00"}}]'

Example usage (with :const:`CANONICAL_JSON_OPTIONS`):

.. doctest::

   >>> from bson import Binary, Code
   >>> from bson.json_util import dumps, CANONICAL_JSON_OPTIONS
   >>> dumps([{'foo': [1, 2]},
   ...        {'bar': {'hello': 'world'}},
   ...        {'code': Code("function x() { return 1; }")},
   ...        {'bin': Binary(b"\x01\x02\x03\x04")}],
   ...       json_options=CANONICAL_JSON_OPTIONS)
   '[{"foo": [{"$numberInt": "1"}, {"$numberInt": "2"}]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }"}}, {"bin": {"$binary": {"base64": "AQIDBA==", "subType": "00"}}}]'

Example usage (with :const:`RELAXED_JSON_OPTIONS`):

.. doctest::

   >>> from bson import Binary, Code
   >>> from bson.json_util import dumps, RELAXED_JSON_OPTIONS
   >>> dumps([{'foo': [1, 2]},
   ...        {'bar': {'hello': 'world'}},
   ...        {'code': Code("function x() { return 1; }")},
   ...        {'bin': Binary(b"\x01\x02\x03\x04")}],
   ...       json_options=RELAXED_JSON_OPTIONS)
   '[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }"}}, {"bin": {"$binary": {"base64": "AQIDBA==", "subType": "00"}}}]'

Alternatively, you can manually pass the `default` to :func:`json.dumps`.
It won't handle :class:`~bson.binary.Binary` and :class:`~bson.code.Code`
instances (as they are extended strings you can't provide custom defaults),
but it will be faster as there is less recursion.

.. note::
   If your application does not need the flexibility offered by
   :class:`JSONOptions` and spends a large amount of time in the `json_util`
   module, look to
   `python-bsonjs <https://pypi.python.org/pypi/python-bsonjs>`_ for a nice
   performance improvement. `python-bsonjs` is a fast BSON to MongoDB
   Extended JSON converter for Python built on top of
   `libbson <https://github.com/mongodb/libbson>`_. `python-bsonjs` works best
   with PyMongo when using :class:`~bson.raw_bson.RawBSONDocument`.

.. versionchanged:: 2.8
   The output format for :class:`~bson.timestamp.Timestamp` has changed from
   '{"t": <int>, "i": <int>}' to '{"$timestamp": {"t": <int>, "i": <int>}}'.
   This new format will be decoded to an instance of
   :class:`~bson.timestamp.Timestamp`. The old format will continue to be
   decoded to a python dict as before. Encoding to the old format is no longer
   supported as it was never correct and loses type information.
   Added support for $numberLong and $undefined - new in MongoDB 2.6 - and
   parsing $date in ISO-8601 format.

.. versionchanged:: 2.7
   Preserves order when rendering SON, Timestamp, Code, Binary, and DBRef
   instances.

.. versionchanged:: 2.3
   Added dumps and loads helpers to automatically handle conversion to and
   from json and supports :class:`~bson.binary.Binary` and
   :class:`~bson.code.Code`
"""

import base64
import datetime
import json
import math
import re
import sys
import uuid

from pymongo.errors import ConfigurationError

import bson
from bson import EPOCH_AWARE, EPOCH_NAIVE, RE_TYPE, SON
from bson.binary import (Binary, JAVA_LEGACY, CSHARP_LEGACY, OLD_UUID_SUBTYPE,
                         UUID_SUBTYPE)
from bson.code import Code
from bson.codec_options import CodecOptions
from bson.dbref import DBRef
from bson.decimal128 import Decimal128
from bson.int64 import Int64
from bson.max_key import MaxKey
from bson.min_key import MinKey
from bson.objectid import ObjectId
from bson.py3compat import (PY3, iteritems, integer_types, string_type,
                            text_type)
from bson.regex import Regex
from bson.timestamp import Timestamp
from bson.tz_util import utc


_RE_OPT_TABLE = {
    "i": re.I,
    "l": re.L,
    "m": re.M,
    "s": re.S,
    "u": re.U,
    "x": re.X,
}

# Dollar-prefixed keys which may appear in DBRefs.
_DBREF_KEYS = frozenset(['$id', '$ref', '$db'])


class DatetimeRepresentation:
    LEGACY = 0
    """Legacy MongoDB Extended JSON datetime representation.

    :class:`datetime.datetime` instances will be encoded to JSON in the
    format `{"$date": <dateAsMilliseconds>}`, where `dateAsMilliseconds` is
    a 64-bit signed integer giving the number of milliseconds since the Unix
    epoch UTC. This was the default encoding before PyMongo version 3.4.

    .. versionadded:: 3.4
    """

    NUMBERLONG = 1
    """NumberLong datetime representation.

    :class:`datetime.datetime` instances will be encoded to JSON in the
    format `{"$date": {"$numberLong": "<dateAsMilliseconds>"}}`,
    where `dateAsMilliseconds` is the string representation of a 64-bit signed
    integer giving the number of milliseconds since the Unix epoch UTC.

    .. versionadded:: 3.4
    """

    ISO8601 = 2
    """ISO-8601 datetime representation.

    :class:`datetime.datetime` instances greater than or equal to the Unix
    epoch UTC will be encoded to JSON in the format `{"$date": "<ISO-8601>"}`.
    :class:`datetime.datetime` instances before the Unix epoch UTC will be
    encoded as if the datetime representation is
    :const:`~DatetimeRepresentation.NUMBERLONG`.

    .. versionadded:: 3.4
    """
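A sketch contrasting the three representations above (illustrative; the output
shapes follow the docstrings, computed here for an arbitrary example date):

import datetime
from bson.json_util import dumps, JSONOptions, DatetimeRepresentation

doc = {"d": datetime.datetime(2015, 6, 1)}
for rep in (DatetimeRepresentation.LEGACY,
            DatetimeRepresentation.NUMBERLONG,
            DatetimeRepresentation.ISO8601):
    print(dumps(doc, json_options=JSONOptions(datetime_representation=rep)))
# {"d": {"$date": 1433116800000}}
# {"d": {"$date": {"$numberLong": "1433116800000"}}}
# {"d": {"$date": "2015-06-01T00:00:00Z"}}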
class JSONMode:
    LEGACY = 0
    """Legacy Extended JSON representation.

    In this mode, :func:`~bson.json_util.dumps` produces PyMongo's legacy
    non-standard JSON output. Consider using
    :const:`~bson.json_util.JSONMode.RELAXED` or
    :const:`~bson.json_util.JSONMode.CANONICAL` instead.

    .. versionadded:: 3.5
    """

    RELAXED = 1
    """Relaxed Extended JSON representation.

    In this mode, :func:`~bson.json_util.dumps` produces Relaxed Extended JSON,
    a mostly JSON-like format. Consider using this for things like a web API,
    where one is sending a document (or a projection of a document) that only
    uses ordinary JSON type primitives. In particular, the ``int``,
    :class:`~bson.int64.Int64`, and ``float`` numeric types are represented in
    the native JSON number format. This output is also the most human readable
    and is useful for debugging and documentation.

    .. seealso:: The specification for Relaxed `Extended JSON`_.

    .. versionadded:: 3.5
    """

    CANONICAL = 2
    """Canonical Extended JSON representation.

    In this mode, :func:`~bson.json_util.dumps` produces Canonical Extended
    JSON, a type preserving format. Consider using this for things like
    testing, where one has to precisely specify expected types in JSON. In
    particular, the ``int``, :class:`~bson.int64.Int64`, and ``float`` numeric
    types are encoded with type wrappers.

    .. seealso:: The specification for Canonical `Extended JSON`_.

    .. versionadded:: 3.5
    """


class JSONOptions(CodecOptions):
    """Encapsulates JSON options for :func:`dumps` and :func:`loads`.

    :Parameters:
      - `strict_number_long`: If ``True``, :class:`~bson.int64.Int64` objects
        are encoded to MongoDB Extended JSON's *Strict mode* type
        `NumberLong`, i.e. ``'{"$numberLong": "<number>" }'``. Otherwise they
        will be encoded as an `int`. Defaults to ``False``.
      - `datetime_representation`: The representation to use when encoding
        instances of :class:`datetime.datetime`. Defaults to
        :const:`~DatetimeRepresentation.LEGACY`.
      - `strict_uuid`: If ``True``, :class:`uuid.UUID` objects are encoded to
        MongoDB Extended JSON's *Strict mode* type `Binary`. Otherwise they
        will be encoded as ``'{"$uuid": "<hex>" }'``. Defaults to ``False``.
      - `json_mode`: The :class:`JSONMode` to use when encoding BSON types to
        Extended JSON. Defaults to :const:`~JSONMode.LEGACY`.
      - `document_class`: BSON documents returned by :func:`loads` will be
        decoded to an instance of this class. Must be a subclass of
        :class:`collections.MutableMapping`. Defaults to :class:`dict`.
      - `uuid_representation`: The BSON representation to use when encoding
        and decoding instances of :class:`uuid.UUID`. Defaults to
        :const:`~bson.binary.PYTHON_LEGACY`.
      - `tz_aware`: If ``True``, MongoDB Extended JSON's *Strict mode* type
        `Date` will be decoded to timezone aware instances of
        :class:`datetime.datetime`. Otherwise they will be naive. Defaults
        to ``True``.
      - `tzinfo`: A :class:`datetime.tzinfo` subclass that specifies the
        timezone from which :class:`~datetime.datetime` objects should be
        decoded. Defaults to :const:`~bson.tz_util.utc`.
      - `args`: arguments to :class:`~bson.codec_options.CodecOptions`
      - `kwargs`: arguments to :class:`~bson.codec_options.CodecOptions`

    .. seealso:: The specification for Relaxed and Canonical `Extended JSON`_.

    .. versionadded:: 3.4

    .. versionchanged:: 3.5
       Accepts the optional parameter `json_mode`.

    """

    def __new__(cls, strict_number_long=False,
                datetime_representation=DatetimeRepresentation.LEGACY,
                strict_uuid=False, json_mode=JSONMode.LEGACY,
                *args, **kwargs):
        kwargs["tz_aware"] = kwargs.get("tz_aware", True)
        if kwargs["tz_aware"]:
            kwargs["tzinfo"] = kwargs.get("tzinfo", utc)
        if datetime_representation not in (DatetimeRepresentation.LEGACY,
                                           DatetimeRepresentation.NUMBERLONG,
                                           DatetimeRepresentation.ISO8601):
            raise ConfigurationError(
                "JSONOptions.datetime_representation must be one of LEGACY, "
                "NUMBERLONG, or ISO8601 from DatetimeRepresentation.")
        self = super(JSONOptions, cls).__new__(cls, *args, **kwargs)
        if json_mode not in (JSONMode.LEGACY,
                             JSONMode.RELAXED,
                             JSONMode.CANONICAL):
            raise ConfigurationError(
                "JSONOptions.json_mode must be one of LEGACY, RELAXED, "
                "or CANONICAL from JSONMode.")
        self.json_mode = json_mode
        if self.json_mode == JSONMode.RELAXED:
            self.strict_number_long = False
            self.datetime_representation = DatetimeRepresentation.ISO8601
            self.strict_uuid = True
        elif self.json_mode == JSONMode.CANONICAL:
            self.strict_number_long = True
            self.datetime_representation = DatetimeRepresentation.NUMBERLONG
            self.strict_uuid = True
        else:
            self.strict_number_long = strict_number_long
            self.datetime_representation = datetime_representation
            self.strict_uuid = strict_uuid
        return self

    def _arguments_repr(self):
        return ('strict_number_long=%r, '
                'datetime_representation=%r, '
                'strict_uuid=%r, json_mode=%r, %s' % (
                    self.strict_number_long,
                    self.datetime_representation,
                    self.strict_uuid,
                    self.json_mode,
                    super(JSONOptions, self)._arguments_repr()))


LEGACY_JSON_OPTIONS = JSONOptions(json_mode=JSONMode.LEGACY)
""":class:`JSONOptions` for encoding to PyMongo's legacy JSON format.

.. seealso:: The documentation for :const:`bson.json_util.JSONMode.LEGACY`.

.. versionadded:: 3.5
"""

DEFAULT_JSON_OPTIONS = LEGACY_JSON_OPTIONS
"""The default :class:`JSONOptions` for JSON encoding/decoding.

The same as :const:`LEGACY_JSON_OPTIONS`. This will change to
:const:`RELAXED_JSON_OPTIONS` in a future release.

.. versionadded:: 3.4
"""

CANONICAL_JSON_OPTIONS = JSONOptions(json_mode=JSONMode.CANONICAL)
""":class:`JSONOptions` for Canonical Extended JSON.

.. seealso:: The documentation for :const:`bson.json_util.JSONMode.CANONICAL`.

.. versionadded:: 3.5
"""

RELAXED_JSON_OPTIONS = JSONOptions(json_mode=JSONMode.RELAXED)
""":class:`JSONOptions` for Relaxed Extended JSON.

.. seealso:: The documentation for :const:`bson.json_util.JSONMode.RELAXED`.

.. versionadded:: 3.5
"""

STRICT_JSON_OPTIONS = JSONOptions(
    strict_number_long=True,
    datetime_representation=DatetimeRepresentation.ISO8601,
    strict_uuid=True)
"""**DEPRECATED** - :class:`JSONOptions` for MongoDB Extended JSON's *Strict
mode* encoding.

.. versionadded:: 3.4

.. versionchanged:: 3.5
   Deprecated. Use :const:`RELAXED_JSON_OPTIONS` or
   :const:`CANONICAL_JSON_OPTIONS` instead.
"""


def dumps(obj, *args, **kwargs):
    """Helper function that wraps :func:`json.dumps`.

    Recursive function that handles all BSON types including
    :class:`~bson.binary.Binary` and :class:`~bson.code.Code`.

    :Parameters:
      - `json_options`: A :class:`JSONOptions` instance used to modify the
        encoding of MongoDB Extended JSON types. Defaults to
        :const:`DEFAULT_JSON_OPTIONS`.

    .. versionchanged:: 3.4
       Accepts optional parameter `json_options`. See :class:`JSONOptions`.

    .. versionchanged:: 2.7
       Preserves order when rendering SON, Timestamp, Code, Binary, and DBRef
       instances.
    """
    json_options = kwargs.pop("json_options", DEFAULT_JSON_OPTIONS)
    return json.dumps(_json_convert(obj, json_options), *args, **kwargs)


def loads(s, *args, **kwargs):
    """Helper function that wraps :func:`json.loads`.

    Automatically passes the object_hook for BSON type conversion.

    Raises ``TypeError``, ``ValueError``, ``KeyError``, or
    :exc:`~bson.errors.InvalidId` on invalid MongoDB Extended JSON.

    :Parameters:
      - `json_options`: A :class:`JSONOptions` instance used to modify the
        decoding of MongoDB Extended JSON types. Defaults to
        :const:`DEFAULT_JSON_OPTIONS`.

    .. versionchanged:: 3.5
       Parses Relaxed and Canonical Extended JSON as well as PyMongo's legacy
       format. Now raises ``TypeError`` or ``ValueError`` when parsing JSON
       type wrappers with values of the wrong type or any extra keys.

    .. versionchanged:: 3.4
       Accepts optional parameter `json_options`. See :class:`JSONOptions`.
    """
    json_options = kwargs.pop("json_options", DEFAULT_JSON_OPTIONS)
    kwargs["object_pairs_hook"] = lambda pairs: object_pairs_hook(
        pairs, json_options)
    return json.loads(s, *args, **kwargs)


def _json_convert(obj, json_options=DEFAULT_JSON_OPTIONS):
    """Recursive helper method that converts BSON types so they can be
    converted into json.
    """
    if hasattr(obj, 'iteritems') or hasattr(obj, 'items'):  # PY3 support
        return SON(((k, _json_convert(v, json_options))
                    for k, v in iteritems(obj)))
    elif hasattr(obj, '__iter__') and not isinstance(obj, (text_type, bytes)):
        return list((_json_convert(v, json_options) for v in obj))
    try:
        return default(obj, json_options)
    except TypeError:
        return obj


def object_pairs_hook(pairs, json_options=DEFAULT_JSON_OPTIONS):
    return object_hook(json_options.document_class(pairs), json_options)


def object_hook(dct, json_options=DEFAULT_JSON_OPTIONS):
    if "$oid" in dct:
        return _parse_canonical_oid(dct)
    if "$ref" in dct:
        return _parse_canonical_dbref(dct)
    if "$date" in dct:
        return _parse_canonical_datetime(dct, json_options)
    if "$regex" in dct:
        return _parse_legacy_regex(dct)
    if "$minKey" in dct:
        return _parse_canonical_minkey(dct)
    if "$maxKey" in dct:
        return _parse_canonical_maxkey(dct)
    if "$binary" in dct:
        if "$type" in dct:
            return _parse_legacy_binary(dct, json_options)
        else:
            return _parse_canonical_binary(dct, json_options)
    if "$code" in dct:
        return _parse_canonical_code(dct)
    if "$uuid" in dct:
        return _parse_legacy_uuid(dct)
    if "$undefined" in dct:
        return None
    if "$numberLong" in dct:
        return _parse_canonical_int64(dct)
    if "$timestamp" in dct:
        tsp = dct["$timestamp"]
        return Timestamp(tsp["t"], tsp["i"])
    if "$numberDecimal" in dct:
        return _parse_canonical_decimal128(dct)
    if "$dbPointer" in dct:
        return _parse_canonical_dbpointer(dct)
    if "$regularExpression" in dct:
        return _parse_canonical_regex(dct)
    if "$symbol" in dct:
        return _parse_canonical_symbol(dct)
    if "$numberInt" in dct:
        return _parse_canonical_int32(dct)
    if "$numberDouble" in dct:
        return _parse_canonical_double(dct)
    return dct


def _parse_legacy_regex(doc):
    pattern = doc["$regex"]
    # Check if this is the $regex query operator.
    if isinstance(pattern, Regex):
        return doc
    flags = 0
    # PyMongo always adds $options but some other tools may not.
    for opt in doc.get("$options", ""):
        flags |= _RE_OPT_TABLE.get(opt, 0)
    return Regex(pattern, flags)


def _parse_legacy_uuid(doc):
    """Decode a JSON legacy $uuid to Python UUID."""
    if len(doc) != 1:
        raise TypeError('Bad $uuid, extra field(s): %s' % (doc,))
    return uuid.UUID(doc["$uuid"])


def _binary_or_uuid(data, subtype, json_options):
    # special handling for UUID
    if subtype == OLD_UUID_SUBTYPE:
        if json_options.uuid_representation == CSHARP_LEGACY:
            return uuid.UUID(bytes_le=data)
        if json_options.uuid_representation == JAVA_LEGACY:
            data = data[7::-1] + data[:7:-1]
        return uuid.UUID(bytes=data)
    if subtype == UUID_SUBTYPE:
        return uuid.UUID(bytes=data)
    if PY3 and subtype == 0:
        return data
    return Binary(data, subtype)


def _parse_legacy_binary(doc, json_options):
    if isinstance(doc["$type"], int):
        doc["$type"] = "%02x" % doc["$type"]
    subtype = int(doc["$type"], 16)
    if subtype >= 0xffffff80:  # Handle mongoexport values
        subtype = int(doc["$type"][6:], 16)
    data = base64.b64decode(doc["$binary"].encode())
    return _binary_or_uuid(data, subtype, json_options)


def _parse_canonical_binary(doc, json_options):
    binary = doc["$binary"]
    b64 = binary["base64"]
    subtype = binary["subType"]
    if not isinstance(b64, string_type):
        raise TypeError('$binary base64 must be a string: %s' % (doc,))
    if not isinstance(subtype, string_type) or len(subtype) > 2:
        raise TypeError('$binary subType must be a string at most 2 '
                        'characters: %s' % (doc,))
    if len(binary) != 2:
        raise TypeError('$binary must include only "base64" and "subType" '
                        'components: %s' % (doc,))

    data = base64.b64decode(b64.encode())
    return _binary_or_uuid(data, int(subtype, 16), json_options)


def _parse_canonical_datetime(doc, json_options):
    """Decode a JSON datetime to python datetime.datetime."""
    dtm = doc["$date"]
    if len(doc) != 1:
        raise TypeError('Bad $date, extra field(s): %s' % (doc,))
    # mongoexport 2.6 and newer
    if isinstance(dtm, string_type):
        # Parse offset
        if dtm[-1] == 'Z':
            dt = dtm[:-1]
            offset = 'Z'
        elif dtm[-6] in ('+', '-') and dtm[-3] == ':':
            # (+|-)HH:MM
            dt = dtm[:-6]
            offset = dtm[-6:]
        elif dtm[-5] in ('+', '-'):
            # (+|-)HHMM
            dt = dtm[:-5]
            offset = dtm[-5:]
        elif dtm[-3] in ('+', '-'):
            # (+|-)HH
            dt = dtm[:-3]
            offset = dtm[-3:]
        else:
            dt = dtm
            offset = ''

        # Parse the optional fractional seconds portion.
        dot_index = dt.rfind('.')
        microsecond = 0
        if dot_index != -1:
            microsecond = int(float(dt[dot_index:]) * 1000000)
            dt = dt[:dot_index]

        aware = datetime.datetime.strptime(
            dt, "%Y-%m-%dT%H:%M:%S").replace(microsecond=microsecond,
                                             tzinfo=utc)

        if offset and offset != 'Z':
            if len(offset) == 6:
                hours, minutes = offset[1:].split(':')
                secs = (int(hours) * 3600 + int(minutes) * 60)
            elif len(offset) == 5:
                secs = (int(offset[1:3]) * 3600 + int(offset[3:]) * 60)
            elif len(offset) == 3:
                secs = int(offset[1:3]) * 3600
            if offset[0] == "-":
                secs *= -1
            aware = aware - datetime.timedelta(seconds=secs)

        if json_options.tz_aware:
            if json_options.tzinfo:
                aware = aware.astimezone(json_options.tzinfo)
            return aware
        else:
            return aware.replace(tzinfo=None)
    return bson._millis_to_datetime(int(dtm), json_options)


def _parse_canonical_oid(doc):
    """Decode a JSON ObjectId to bson.objectid.ObjectId."""
    if len(doc) != 1:
        raise TypeError('Bad $oid, extra field(s): %s' % (doc,))
    return ObjectId(doc['$oid'])


def _parse_canonical_symbol(doc):
    """Decode a JSON symbol to Python string."""
    symbol = doc['$symbol']
    if len(doc) != 1:
        raise TypeError('Bad $symbol, extra field(s): %s' % (doc,))
    return text_type(symbol)


def _parse_canonical_code(doc):
    """Decode a JSON code to bson.code.Code."""
    for key in doc:
        if key not in ('$code', '$scope'):
            raise TypeError('Bad $code, extra field(s): %s' % (doc,))
    return Code(doc['$code'], scope=doc.get('$scope'))


def _parse_canonical_regex(doc):
    """Decode a JSON regex to bson.regex.Regex."""
    regex = doc['$regularExpression']
    if len(doc) != 1:
        raise TypeError('Bad $regularExpression, extra field(s): %s' % (doc,))
    if len(regex) != 2:
        raise TypeError('Bad $regularExpression must include only "pattern" '
                        'and "options" components: %s' % (doc,))
    return Regex(regex['pattern'], regex['options'])


def _parse_canonical_dbref(doc):
    """Decode a JSON DBRef to bson.dbref.DBRef."""
    for key in doc:
        if key.startswith('$') and key not in _DBREF_KEYS:
            # Other keys start with $, so dct cannot be parsed as a DBRef.
            return doc
    return DBRef(doc.pop('$ref'), doc.pop('$id'),
                 database=doc.pop('$db', None), **doc)


def _parse_canonical_dbpointer(doc):
    """Decode a JSON (deprecated) DBPointer to bson.dbref.DBRef."""
    dbref = doc['$dbPointer']
    if len(doc) != 1:
        raise TypeError('Bad $dbPointer, extra field(s): %s' % (doc,))
    if isinstance(dbref, DBRef):
        dbref_doc = dbref.as_doc()
        # DBPointer must not contain $db in its value.
        if dbref.database is not None:
            raise TypeError(
                'Bad $dbPointer, extra field $db: %s' % (dbref_doc,))
        if not isinstance(dbref.id, ObjectId):
            raise TypeError(
                'Bad $dbPointer, $id must be an ObjectId: %s' % (dbref_doc,))
        if len(dbref_doc) != 2:
            raise TypeError(
                'Bad $dbPointer, extra field(s) in DBRef: %s' % (dbref_doc,))
        return dbref
    else:
        raise TypeError('Bad $dbPointer, expected a DBRef: %s' % (doc,))


def _parse_canonical_int32(doc):
    """Decode a JSON int32 to python int."""
    i_str = doc['$numberInt']
    if len(doc) != 1:
        raise TypeError('Bad $numberInt, extra field(s): %s' % (doc,))
    if not isinstance(i_str, string_type):
        raise TypeError('$numberInt must be string: %s' % (doc,))
    return int(i_str)


def _parse_canonical_int64(doc):
    """Decode a JSON int64 to bson.int64.Int64."""
    l_str = doc['$numberLong']
    if len(doc) != 1:
        raise TypeError('Bad $numberLong, extra field(s): %s' % (doc,))
    return Int64(l_str)


def _parse_canonical_double(doc):
    """Decode a JSON double to python float."""
    d_str = doc['$numberDouble']
    if len(doc) != 1:
        raise TypeError('Bad $numberDouble, extra field(s): %s' % (doc,))
    if not isinstance(d_str, string_type):
        raise TypeError('$numberDouble must be string: %s' % (doc,))
    return float(d_str)


def _parse_canonical_decimal128(doc):
    """Decode a JSON decimal128 to bson.decimal128.Decimal128."""
    d_str = doc['$numberDecimal']
    if len(doc) != 1:
        raise TypeError('Bad $numberDecimal, extra field(s): %s' % (doc,))
    if not isinstance(d_str, string_type):
        raise TypeError('$numberDecimal must be string: %s' % (doc,))
    return Decimal128(d_str)


def _parse_canonical_minkey(doc):
    """Decode a JSON MinKey to bson.min_key.MinKey."""
    if type(doc['$minKey']) is not int or doc['$minKey'] != 1:
        raise TypeError('$minKey value must be 1: %s' % (doc,))
    if len(doc) != 1:
        raise TypeError('Bad $minKey, extra field(s): %s' % (doc,))
    return MinKey()


def _parse_canonical_maxkey(doc):
    """Decode a JSON MaxKey to bson.max_key.MaxKey."""
    if type(doc['$maxKey']) is not int or doc['$maxKey'] != 1:
        raise TypeError('$maxKey value must be 1: %s' % (doc,))
    if len(doc) != 1:
        raise TypeError('Bad $maxKey, extra field(s): %s' % (doc,))
    return MaxKey()


def _encode_binary(data, subtype, json_options):
    if json_options.json_mode == JSONMode.LEGACY:
        return SON([
            ('$binary', base64.b64encode(data).decode()),
            ('$type', "%02x" % subtype)])
    return {'$binary': SON([
        ('base64', base64.b64encode(data).decode()),
        ('subType', "%02x" % subtype)])}


def default(obj, json_options=DEFAULT_JSON_OPTIONS):
    # We preserve key order when rendering SON, DBRef, etc. as JSON by
    # returning a SON for those types instead of a dict.
    if isinstance(obj, ObjectId):
        return {"$oid": str(obj)}
    if isinstance(obj, DBRef):
        return _json_convert(obj.as_doc(), json_options=json_options)
    if isinstance(obj, datetime.datetime):
        if (json_options.datetime_representation ==
                DatetimeRepresentation.ISO8601):
            if not obj.tzinfo:
                obj = obj.replace(tzinfo=utc)
            if obj >= EPOCH_AWARE:
                off = obj.tzinfo.utcoffset(obj)
                if (off.days, off.seconds, off.microseconds) == (0, 0, 0):
                    tz_string = 'Z'
                else:
                    tz_string = obj.strftime('%z')
                millis = int(obj.microsecond / 1000)
                fracsecs = ".%03d" % (millis,) if millis else ""
                return {"$date": "%s%s%s" % (
                    obj.strftime("%Y-%m-%dT%H:%M:%S"), fracsecs, tz_string)}

        millis = bson._datetime_to_millis(obj)
        if (json_options.datetime_representation ==
                DatetimeRepresentation.LEGACY):
            return {"$date": millis}
        return {"$date": {"$numberLong": str(millis)}}
    if json_options.strict_number_long and isinstance(obj, Int64):
        return {"$numberLong": str(obj)}
    if isinstance(obj, (RE_TYPE, Regex)):
        flags = ""
        if obj.flags & re.IGNORECASE:
            flags += "i"
        if obj.flags & re.LOCALE:
            flags += "l"
        if obj.flags & re.MULTILINE:
            flags += "m"
        if obj.flags & re.DOTALL:
            flags += "s"
        if obj.flags & re.UNICODE:
            flags += "u"
        if obj.flags & re.VERBOSE:
            flags += "x"
        if isinstance(obj.pattern, text_type):
            pattern = obj.pattern
        else:
            pattern = obj.pattern.decode('utf-8')
        if json_options.json_mode == JSONMode.LEGACY:
            return SON([("$regex", pattern), ("$options", flags)])
        return {'$regularExpression': SON([("pattern", pattern),
                                           ("options", flags)])}
    if isinstance(obj, MinKey):
        return {"$minKey": 1}
    if isinstance(obj, MaxKey):
        return {"$maxKey": 1}
    if isinstance(obj, Timestamp):
        return {"$timestamp": SON([("t", obj.time), ("i", obj.inc)])}
    if isinstance(obj, Code):
        if obj.scope is None:
            return {'$code': str(obj)}
        return SON([
            ('$code', str(obj)),
            ('$scope', _json_convert(obj.scope, json_options))])
    if isinstance(obj, Binary):
        return _encode_binary(obj, obj.subtype, json_options)
    if PY3 and isinstance(obj, bytes):
        return _encode_binary(obj, 0, json_options)
    if isinstance(obj, uuid.UUID):
        if json_options.strict_uuid:
            data = obj.bytes
            subtype = OLD_UUID_SUBTYPE
            if json_options.uuid_representation == CSHARP_LEGACY:
                data = obj.bytes_le
            elif json_options.uuid_representation == JAVA_LEGACY:
                data = data[7::-1] + data[:7:-1]
            elif json_options.uuid_representation == UUID_SUBTYPE:
                subtype = UUID_SUBTYPE
            return _encode_binary(data, subtype, json_options)
        else:
            return {"$uuid": obj.hex}
    if isinstance(obj, Decimal128):
        return {"$numberDecimal": str(obj)}
    if isinstance(obj, bool):
        return obj
    if (json_options.json_mode == JSONMode.CANONICAL and
            isinstance(obj, integer_types)):
        if -2 ** 31 <= obj < 2 ** 31:
            return {'$numberInt': text_type(obj)}
        return {'$numberLong': text_type(obj)}
    if json_options.json_mode != JSONMode.LEGACY and isinstance(obj, float):
        if math.isnan(obj):
            return {'$numberDouble': 'NaN'}
        elif math.isinf(obj):
            representation = 'Infinity' if obj > 0 else '-Infinity'
            return {'$numberDouble': representation}
        elif json_options.json_mode == JSONMode.CANONICAL:
            # repr() will return the shortest string guaranteed to produce the
            # original value, when float() is called on it. str produces a
            # shorter string in Python 2.
            return {'$numberDouble': text_type(repr(obj))}
    raise TypeError("%r is not JSON serializable" % obj)
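A canonical-mode round trip through the two helpers above (illustrative only):

from bson.json_util import dumps, loads, CANONICAL_JSON_OPTIONS

text = dumps({"n": 42}, json_options=CANONICAL_JSON_OPTIONS)
print(text)  # '{"n": {"$numberInt": "42"}}'
# loads() unwraps the type wrapper back to a plain int:
assert loads(text, json_options=CANONICAL_JSON_OPTIONS) == {"n": 42}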
@ -1,50 +0,0 @@
# Copyright 2010-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Representation for the MongoDB internal MaxKey type.
"""


class MaxKey(object):
    """MongoDB internal MaxKey type.

    .. versionchanged:: 2.7
       ``MaxKey`` now implements comparison operators.
    """

    _type_marker = 127

    def __eq__(self, other):
        return isinstance(other, MaxKey)

    def __hash__(self):
        return hash(self._type_marker)

    def __ne__(self, other):
        return not self == other

    def __le__(self, other):
        return isinstance(other, MaxKey)

    def __lt__(self, dummy):
        return False

    def __ge__(self, dummy):
        return True

    def __gt__(self, other):
        return not isinstance(other, MaxKey)

    def __repr__(self):
        return "MaxKey()"
@ -1,50 +0,0 @@
# Copyright 2010-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Representation for the MongoDB internal MinKey type.
"""


class MinKey(object):
    """MongoDB internal MinKey type.

    .. versionchanged:: 2.7
       ``MinKey`` now implements comparison operators.
    """

    _type_marker = 255

    def __eq__(self, other):
        return isinstance(other, MinKey)

    def __hash__(self):
        return hash(self._type_marker)

    def __ne__(self, other):
        return not self == other

    def __le__(self, dummy):
        return True

    def __lt__(self, other):
        return not isinstance(other, MinKey)

    def __ge__(self, other):
        return isinstance(other, MinKey)

    def __gt__(self, dummy):
        return False

    def __repr__(self):
        return "MinKey()"
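A sketch of the comparison semantics the MinKey and MaxKey classes above
implement (illustrative only):

from bson.max_key import MaxKey
from bson.min_key import MinKey

# MinKey compares below, and MaxKey above, any other value:
assert MinKey() < 1 < MaxKey()
assert sorted([MaxKey(), 1, MinKey()]) == [MinKey(), 1, MaxKey()]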
@ -1,299 +0,0 @@
|
||||
# Copyright 2009-2015 MongoDB, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Tools for working with MongoDB `ObjectIds
|
||||
<http://dochub.mongodb.org/core/objectids>`_.
|
||||
"""
|
||||
|
||||
import binascii
|
||||
import calendar
|
||||
import datetime
|
||||
import os
|
||||
import struct
|
||||
import threading
|
||||
import time
|
||||
|
||||
from random import SystemRandom
|
||||
|
||||
from bson.errors import InvalidId
|
||||
from bson.py3compat import PY3, bytes_from_hex, string_type, text_type
|
||||
from bson.tz_util import utc
|
||||
|
||||
|
||||
_MAX_COUNTER_VALUE = 0xFFFFFF
|
||||
|
||||
|
||||
def _raise_invalid_id(oid):
|
||||
raise InvalidId(
|
||||
"%r is not a valid ObjectId, it must be a 12-byte input"
|
||||
" or a 24-character hex string" % oid)
|
||||
|
||||
|
||||
def _random_bytes():
|
||||
"""Get the 5-byte random field of an ObjectId."""
|
||||
return os.urandom(5)
|
||||
|
||||
|
||||
class ObjectId(object):
|
||||
"""A MongoDB ObjectId.
|
||||
"""
|
||||
|
||||
_pid = os.getpid()
|
||||
|
||||
_inc = SystemRandom().randint(0, _MAX_COUNTER_VALUE)
|
||||
_inc_lock = threading.Lock()
|
||||
|
||||
__random = _random_bytes()
|
||||
|
||||
__slots__ = ('__id',)
|
||||
|
||||
_type_marker = 7
|
||||
|
||||
def __init__(self, oid=None):
|
||||
"""Initialize a new ObjectId.
|
||||
|
||||
An ObjectId is a 12-byte unique identifier consisting of:
|
||||
|
||||
- a 4-byte value representing the seconds since the Unix epoch,
|
||||
- a 5-byte random value,
|
||||
- a 3-byte counter, starting with a random value.
|
||||
|
||||
By default, ``ObjectId()`` creates a new unique identifier. The
|
||||
optional parameter `oid` can be an :class:`ObjectId`, or any 12
|
||||
:class:`bytes` or, in Python 2, any 12-character :class:`str`.
|
||||
|
||||
For example, the 12 bytes b'foo-bar-quux' do not follow the ObjectId
|
||||
specification but they are acceptable input::
|
||||
|
||||
>>> ObjectId(b'foo-bar-quux')
|
||||
ObjectId('666f6f2d6261722d71757578')
|
||||
|
||||
        `oid` can also be a :class:`unicode` or :class:`str` of 24 hex digits::

          >>> ObjectId('0123456789ab0123456789ab')
          ObjectId('0123456789ab0123456789ab')
          >>>
          >>> # A u-prefixed unicode literal:
          >>> ObjectId(u'0123456789ab0123456789ab')
          ObjectId('0123456789ab0123456789ab')

        Raises :class:`~bson.errors.InvalidId` if `oid` is not 12 bytes nor
        24 hex digits, or :class:`TypeError` if `oid` is not an accepted type.

        :Parameters:
          - `oid` (optional): a valid ObjectId.

        .. mongodoc:: objectids

        .. versionchanged:: 3.8
           :class:`~bson.objectid.ObjectId` now implements the `ObjectID
           specification version 0.2
           <https://github.com/mongodb/specifications/blob/master/source/
           objectid.rst>`_.
        """
        if oid is None:
            self.__generate()
        elif isinstance(oid, bytes) and len(oid) == 12:
            self.__id = oid
        else:
            self.__validate(oid)

    @classmethod
    def from_datetime(cls, generation_time):
        """Create a dummy ObjectId instance with a specific generation time.

        This method is useful for doing range queries on a field
        containing :class:`ObjectId` instances.

        .. warning::
           It is not safe to insert a document containing an ObjectId
           generated using this method. This method deliberately
           eliminates the uniqueness guarantee that ObjectIds
           generally provide. ObjectIds generated with this method
           should be used exclusively in queries.

        `generation_time` will be converted to UTC. Naive datetime
        instances will be treated as though they already contain UTC.

        An example using this helper to get documents where ``"_id"``
        was generated before January 1, 2010 would be:

        >>> gen_time = datetime.datetime(2010, 1, 1)
        >>> dummy_id = ObjectId.from_datetime(gen_time)
        >>> result = collection.find({"_id": {"$lt": dummy_id}})

        :Parameters:
          - `generation_time`: :class:`~datetime.datetime` to be used
            as the generation time for the resulting ObjectId.
        """
        if generation_time.utcoffset() is not None:
            generation_time = generation_time - generation_time.utcoffset()
        timestamp = calendar.timegm(generation_time.timetuple())
        oid = struct.pack(
            ">I", int(timestamp)) + b"\x00\x00\x00\x00\x00\x00\x00\x00"
        return cls(oid)

    @classmethod
    def is_valid(cls, oid):
        """Checks if an `oid` string is valid or not.

        :Parameters:
          - `oid`: the object id to validate

        .. versionadded:: 2.3
        """
        if not oid:
            return False

        try:
            ObjectId(oid)
            return True
        except (InvalidId, TypeError):
            return False

    @classmethod
    def _random(cls):
        """Generate a 5-byte random number once per process.
        """
        pid = os.getpid()
        if pid != cls._pid:
            cls._pid = pid
            cls.__random = _random_bytes()
        return cls.__random

    def __generate(self):
        """Generate a new value for this ObjectId.
        """

        # 4 bytes current time
        oid = struct.pack(">I", int(time.time()))

        # 5 bytes random
        oid += ObjectId._random()

        # 3 bytes inc
        with ObjectId._inc_lock:
            oid += struct.pack(">I", ObjectId._inc)[1:4]
            ObjectId._inc = (ObjectId._inc + 1) % (_MAX_COUNTER_VALUE + 1)

        self.__id = oid

    def __validate(self, oid):
        """Validate and use the given id for this ObjectId.

        Raises TypeError if id is not an instance of
        (:class:`basestring` (:class:`str` or :class:`bytes`
        in python 3), ObjectId) and InvalidId if it is not a
        valid ObjectId.

        :Parameters:
          - `oid`: a valid ObjectId
        """
        if isinstance(oid, ObjectId):
            self.__id = oid.binary
        # bytes or unicode in python 2, str in python 3
        elif isinstance(oid, string_type):
            if len(oid) == 24:
                try:
                    self.__id = bytes_from_hex(oid)
                except (TypeError, ValueError):
                    _raise_invalid_id(oid)
            else:
                _raise_invalid_id(oid)
        else:
            raise TypeError("id must be an instance of (bytes, %s, ObjectId), "
                            "not %s" % (text_type.__name__, type(oid)))

    @property
    def binary(self):
        """12-byte binary representation of this ObjectId.
        """
        return self.__id

    @property
    def generation_time(self):
        """A :class:`datetime.datetime` instance representing the time of
        generation for this :class:`ObjectId`.

        The :class:`datetime.datetime` is timezone aware, and
        represents the generation time in UTC. It is precise to the
        second.
        """
        timestamp = struct.unpack(">I", self.__id[0:4])[0]
        return datetime.datetime.fromtimestamp(timestamp, utc)

    def __getstate__(self):
        """Return the value of this object for pickling.
        Needed explicitly because __slots__ is defined.
        """
        return self.__id

    def __setstate__(self, value):
        """Explicitly set state from pickling.
        """
        # Provide backwards compatibility with OIDs
        # pickled with pymongo-1.9 or older.
        if isinstance(value, dict):
            oid = value["_ObjectId__id"]
        else:
            oid = value
        # ObjectIds pickled in python 2.x used `str` for __id.
        # In python 3.x this has to be converted to `bytes`
        # by encoding latin-1.
        if PY3 and isinstance(oid, text_type):
            self.__id = oid.encode('latin-1')
        else:
            self.__id = oid

    def __str__(self):
        if PY3:
            return binascii.hexlify(self.__id).decode()
        return binascii.hexlify(self.__id)

    def __repr__(self):
        return "ObjectId('%s')" % (str(self),)

    def __eq__(self, other):
        if isinstance(other, ObjectId):
            return self.__id == other.binary
        return NotImplemented

    def __ne__(self, other):
        if isinstance(other, ObjectId):
            return self.__id != other.binary
        return NotImplemented

    def __lt__(self, other):
        if isinstance(other, ObjectId):
            return self.__id < other.binary
        return NotImplemented

    def __le__(self, other):
        if isinstance(other, ObjectId):
            return self.__id <= other.binary
        return NotImplemented

    def __gt__(self, other):
        if isinstance(other, ObjectId):
            return self.__id > other.binary
        return NotImplemented

    def __ge__(self, other):
        if isinstance(other, ObjectId):
            return self.__id >= other.binary
        return NotImplemented

    def __hash__(self):
        """Get a hash value for this :class:`ObjectId`."""
        return hash(self.__id)
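# A minimal usage sketch for the class above, assuming the `bson` package
# shipped with PyMongo 3.x is installed. ObjectIds are 12 bytes: a 4-byte
# timestamp, a 5-byte per-process random value and a 3-byte counter, so they
# sort roughly by creation time.
import datetime
from bson.objectid import ObjectId

oid = ObjectId()
print(len(oid.binary))            # 12 raw bytes
print(oid.generation_time)        # tz-aware UTC datetime, precise to the second
print(oid == ObjectId(str(oid)))  # True: the 24-hex-digit form round-trips

# Query-only boundary, e.g. collection.find({"_id": {"$lt": boundary}})
# matches documents whose _id was generated before January 1, 2010.
boundary = ObjectId.from_datetime(datetime.datetime(2010, 1, 1))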
@ -1,107 +0,0 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.

"""Utility functions and definitions for python3 compatibility."""

import sys

PY3 = sys.version_info[0] == 3

if PY3:
    import codecs
    import collections.abc as abc
    import _thread as thread
    from abc import ABC, abstractmethod
    from io import BytesIO as StringIO

    def abstractproperty(func):
        return property(abstractmethod(func))

    MAXSIZE = sys.maxsize

    imap = map

    def b(s):
        # BSON and socket operations deal in binary data. In
        # python 3 that means instances of `bytes`. In python
        # 2.7 you can create an alias for `bytes` using
        # the b prefix (e.g. b'foo').
        # See http://python3porting.com/problems.html#nicer-solutions
        return codecs.latin_1_encode(s)[0]

    def bytes_from_hex(h):
        return bytes.fromhex(h)

    def iteritems(d):
        return iter(d.items())

    def itervalues(d):
        return iter(d.values())

    def reraise(exctype, value, trace=None):
        raise exctype(str(value)).with_traceback(trace)

    def reraise_instance(exc_instance, trace=None):
        raise exc_instance.with_traceback(trace)

    def _unicode(s):
        return s

    text_type = str
    string_type = str
    integer_types = int
else:
    import collections as abc
    import thread
    from abc import ABCMeta, abstractproperty

    from itertools import imap
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO

    ABC = ABCMeta('ABC', (object,), {})

    MAXSIZE = sys.maxint

    def b(s):
        # See comments above. In python 2.x b('foo') is just 'foo'.
        return s

    def bytes_from_hex(h):
        return h.decode('hex')

    def iteritems(d):
        return d.iteritems()

    def itervalues(d):
        return d.itervalues()

    def reraise(exctype, value, trace=None):
        _reraise(exctype, str(value), trace)

    def reraise_instance(exc_instance, trace=None):
        _reraise(exc_instance, None, trace)

    # "raise x, y, z" raises SyntaxError in Python 3
    exec("""def _reraise(exc, value, trace):
    raise exc, value, trace
""")

    _unicode = unicode

    string_type = basestring
    text_type = unicode
    integer_types = (int, long)
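# Sketch of the shims above in use on Python 3, assuming the same `bson`
# package is importable; the point is that callers never branch on version.
from bson.py3compat import b, bytes_from_hex, iteritems, string_type

assert b("foo") == b"foo"                     # always bytes
assert bytes_from_hex("00ff") == b"\x00\xff"  # 24-hex ObjectIds decode this way
assert isinstance("abc", string_type)         # str on 3.x, basestring on 2.x
for key, value in iteritems({"a": 1}):
    print(key, value)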
@ -1,139 +0,0 @@
# Copyright 2015-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tools for representing raw BSON documents.
"""

from bson import _raw_to_dict, _get_object_size
from bson.py3compat import abc, iteritems
from bson.codec_options import (
    DEFAULT_CODEC_OPTIONS as DEFAULT, _RAW_BSON_DOCUMENT_MARKER)
from bson.son import SON


class RawBSONDocument(abc.Mapping):
    """Representation for a MongoDB document that provides access to the raw
    BSON bytes that compose it.

    Only when a field is accessed or modified within the document does
    RawBSONDocument decode its bytes.
    """

    __slots__ = ('__raw', '__inflated_doc', '__codec_options')
    _type_marker = _RAW_BSON_DOCUMENT_MARKER

    def __init__(self, bson_bytes, codec_options=None):
        """Create a new :class:`RawBSONDocument`

        :class:`RawBSONDocument` is a representation of a BSON document that
        provides access to the underlying raw BSON bytes. Only when a field is
        accessed or modified within the document does RawBSONDocument decode
        its bytes.

        :class:`RawBSONDocument` implements the ``Mapping`` abstract base
        class from the standard library so it can be used like a read-only
        ``dict``::

            >>> from bson import encode
            >>> raw_doc = RawBSONDocument(encode({'_id': 'my_doc'}))
            >>> raw_doc.raw
            b'...'
            >>> raw_doc['_id']
            'my_doc'

        :Parameters:
          - `bson_bytes`: the BSON bytes that compose this document
          - `codec_options` (optional): An instance of
            :class:`~bson.codec_options.CodecOptions` whose ``document_class``
            must be :class:`RawBSONDocument`. The default is
            :attr:`DEFAULT_RAW_BSON_OPTIONS`.

        .. versionchanged:: 3.8
           :class:`RawBSONDocument` now validates that the ``bson_bytes``
           passed in represent a single bson document.

        .. versionchanged:: 3.5
           If a :class:`~bson.codec_options.CodecOptions` is passed in, its
           `document_class` must be :class:`RawBSONDocument`.
        """
        self.__raw = bson_bytes
        self.__inflated_doc = None
        # Can't default codec_options to DEFAULT_RAW_BSON_OPTIONS in signature,
        # it refers to this class RawBSONDocument.
        if codec_options is None:
            codec_options = DEFAULT_RAW_BSON_OPTIONS
        elif codec_options.document_class is not RawBSONDocument:
            raise TypeError(
                "RawBSONDocument cannot use CodecOptions with document "
                "class %s" % (codec_options.document_class, ))
        self.__codec_options = codec_options
        # Validate the bson object size.
        _get_object_size(bson_bytes, 0, len(bson_bytes))

    @property
    def raw(self):
        """The raw BSON bytes composing this document."""
        return self.__raw

    def items(self):
        """Lazily decode and iterate elements in this document."""
        return iteritems(self.__inflated)

    @property
    def __inflated(self):
        if self.__inflated_doc is None:
            # We already validated the object's size when this document was
            # created, so no need to do that again.
            # Use SON to preserve ordering of elements.
            self.__inflated_doc = _inflate_bson(
                self.__raw, self.__codec_options)
        return self.__inflated_doc

    def __getitem__(self, item):
        return self.__inflated[item]

    def __iter__(self):
        return iter(self.__inflated)

    def __len__(self):
        return len(self.__inflated)

    def __eq__(self, other):
        if isinstance(other, RawBSONDocument):
            return self.__raw == other.raw
        return NotImplemented

    def __repr__(self):
        return ("RawBSONDocument(%r, codec_options=%r)"
                % (self.raw, self.__codec_options))


def _inflate_bson(bson_bytes, codec_options):
    """Inflates the top level fields of a BSON document.

    :Parameters:
      - `bson_bytes`: the BSON bytes that compose this document
      - `codec_options`: An instance of
        :class:`~bson.codec_options.CodecOptions` whose ``document_class``
        must be :class:`RawBSONDocument`.
    """
    # Use SON to preserve ordering of elements.
    return _raw_to_dict(
        bson_bytes, 4, len(bson_bytes)-1, codec_options, SON())


DEFAULT_RAW_BSON_OPTIONS = DEFAULT.with_options(document_class=RawBSONDocument)
"""The default :class:`~bson.codec_options.CodecOptions` for
:class:`RawBSONDocument`.
"""
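# Sketch of the lazy-decoding behaviour described above, assuming `bson`
# from PyMongo 3.9+ (for the top-level `encode` helper).
from bson import encode
from bson.raw_bson import RawBSONDocument

doc = RawBSONDocument(encode({"_id": "my_doc", "n": 1}))
print(doc.raw[:4])           # the first 4 bytes encode the total document size
print(doc["n"])              # first field access inflates the bytes into a SON
print(len(doc), list(doc))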
@ -1,128 +0,0 @@
# Copyright 2013-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tools for representing MongoDB regular expressions.
"""

import re

from bson.son import RE_TYPE
from bson.py3compat import string_type, text_type


def str_flags_to_int(str_flags):
    flags = 0
    if "i" in str_flags:
        flags |= re.IGNORECASE
    if "l" in str_flags:
        flags |= re.LOCALE
    if "m" in str_flags:
        flags |= re.MULTILINE
    if "s" in str_flags:
        flags |= re.DOTALL
    if "u" in str_flags:
        flags |= re.UNICODE
    if "x" in str_flags:
        flags |= re.VERBOSE

    return flags


class Regex(object):
    """BSON regular expression data."""
    _type_marker = 11

    @classmethod
    def from_native(cls, regex):
        """Convert a Python regular expression into a ``Regex`` instance.

        Note that in Python 3, a regular expression compiled from a
        :class:`str` has the ``re.UNICODE`` flag set. If it is undesirable
        to store this flag in a BSON regular expression, unset it first::

          >>> pattern = re.compile('.*')
          >>> regex = Regex.from_native(pattern)
          >>> regex.flags ^= re.UNICODE
          >>> db.collection.insert({'pattern': regex})

        :Parameters:
          - `regex`: A regular expression object from ``re.compile()``.

        .. warning::
           Python regular expressions use a different syntax and different
           set of flags than MongoDB, which uses `PCRE`_. A regular
           expression retrieved from the server may not compile in
           Python, or may match a different set of strings in Python than
           when used in a MongoDB query.

        .. _PCRE: http://www.pcre.org/
        """
        if not isinstance(regex, RE_TYPE):
            raise TypeError(
                "regex must be a compiled regular expression, not %s"
                % type(regex))

        return Regex(regex.pattern, regex.flags)

    def __init__(self, pattern, flags=0):
        """BSON regular expression data.

        This class is useful to store and retrieve regular expressions that are
        incompatible with Python's regular expression dialect.

        :Parameters:
          - `pattern`: string
          - `flags`: (optional) an integer bitmask, or a string of flag
            characters like "im" for IGNORECASE and MULTILINE
        """
        if not isinstance(pattern, (text_type, bytes)):
            raise TypeError("pattern must be a string, not %s" % type(pattern))
        self.pattern = pattern

        if isinstance(flags, string_type):
            self.flags = str_flags_to_int(flags)
        elif isinstance(flags, int):
            self.flags = flags
        else:
            raise TypeError(
                "flags must be a string or int, not %s" % type(flags))

    def __eq__(self, other):
        if isinstance(other, Regex):
            return self.pattern == other.pattern and self.flags == other.flags
        else:
            return NotImplemented

    __hash__ = None

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "Regex(%r, %r)" % (self.pattern, self.flags)

    def try_compile(self):
        """Compile this :class:`Regex` as a Python regular expression.

        .. warning::
           Python regular expressions use a different syntax and different
           set of flags than MongoDB, which uses `PCRE`_. A regular
           expression retrieved from the server may not compile in
           Python, or may match a different set of strings in Python than
           when used in a MongoDB query. :meth:`try_compile()` may raise
           :exc:`re.error`.

        .. _PCRE: http://www.pcre.org/
        """
        return re.compile(self.pattern, self.flags)
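# Sketch: converting a native pattern to Regex and back, assuming `bson`
# is importable. try_compile() can raise re.error for PCRE-only syntax.
import re
from bson.regex import Regex

regex = Regex.from_native(re.compile(r"^foo", re.IGNORECASE))
print(bool(regex.flags & re.IGNORECASE))  # True: flags survive the conversion
compiled = regex.try_compile()
print(bool(compiled.match("FOObar")))     # True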
@ -1,200 +0,0 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tools for creating and manipulating SON, the Serialized Ocument Notation.

Regular dictionaries can be used instead of SON objects, but not when the order
of keys is important. A SON object can be used just like a normal Python
dictionary."""

import copy
import re

from bson.py3compat import abc, iteritems


# This sort of sucks, but seems to be as good as it gets...
# This is essentially the same as re._pattern_type
RE_TYPE = type(re.compile(""))


class SON(dict):
    """SON data.

    A subclass of dict that maintains ordering of keys and provides a
    few extra niceties for dealing with SON. SON provides an API
    similar to collections.OrderedDict from Python 2.7+.
    """

    def __init__(self, data=None, **kwargs):
        self.__keys = []
        dict.__init__(self)
        self.update(data)
        self.update(kwargs)

    def __new__(cls, *args, **kwargs):
        instance = super(SON, cls).__new__(cls, *args, **kwargs)
        instance.__keys = []
        return instance

    def __repr__(self):
        result = []
        for key in self.__keys:
            result.append("(%r, %r)" % (key, self[key]))
        return "SON([%s])" % ", ".join(result)

    def __setitem__(self, key, value):
        if key not in self.__keys:
            self.__keys.append(key)
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        self.__keys.remove(key)
        dict.__delitem__(self, key)

    def keys(self):
        return list(self.__keys)

    def copy(self):
        other = SON()
        other.update(self)
        return other

    # TODO this is all from UserDict.DictMixin. it could probably be made more
    # efficient.
    # second level definitions support higher levels
    def __iter__(self):
        for k in self.__keys:
            yield k

    def has_key(self, key):
        return key in self.__keys

    # third level takes advantage of second level definitions
    def iteritems(self):
        for k in self:
            yield (k, self[k])

    def iterkeys(self):
        return self.__iter__()

    # fourth level uses definitions from lower levels
    def itervalues(self):
        for _, v in self.iteritems():
            yield v

    def values(self):
        return [v for _, v in self.iteritems()]

    def items(self):
        return [(key, self[key]) for key in self]

    def clear(self):
        self.__keys = []
        super(SON, self).clear()

    def setdefault(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            self[key] = default
        return default

    def pop(self, key, *args):
        if len(args) > 1:
            raise TypeError("pop expected at most 2 arguments, got "
                            + repr(1 + len(args)))
        try:
            value = self[key]
        except KeyError:
            if args:
                return args[0]
            raise
        del self[key]
        return value

    def popitem(self):
        try:
            k, v = next(self.iteritems())
        except StopIteration:
            raise KeyError('container is empty')
        del self[k]
        return (k, v)

    def update(self, other=None, **kwargs):
        # Make progressively weaker assumptions about "other"
        if other is None:
            pass
        elif hasattr(other, 'iteritems'):  # iteritems saves memory and lookups
            for k, v in other.iteritems():
                self[k] = v
        elif hasattr(other, 'keys'):
            for k in other.keys():
                self[k] = other[k]
        else:
            for k, v in other:
                self[k] = v
        if kwargs:
            self.update(kwargs)

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def __eq__(self, other):
        """Comparison to another SON is order-sensitive while comparison to a
        regular dictionary is order-insensitive.
        """
        if isinstance(other, SON):
            return len(self) == len(other) and self.items() == other.items()
        return self.to_dict() == other

    def __ne__(self, other):
        return not self == other

    def __len__(self):
        return len(self.__keys)

    def to_dict(self):
        """Convert a SON document to a normal Python dictionary instance.

        This is trickier than just *dict(...)* because it needs to be
        recursive.
        """

        def transform_value(value):
            if isinstance(value, list):
                return [transform_value(v) for v in value]
            elif isinstance(value, abc.Mapping):
                return dict([
                    (k, transform_value(v))
                    for k, v in iteritems(value)])
            else:
                return value

        return transform_value(dict(self))

    def __deepcopy__(self, memo):
        out = SON()
        val_id = id(self)
        if val_id in memo:
            return memo.get(val_id)
        memo[val_id] = out
        for k, v in self.iteritems():
            if not isinstance(v, RE_TYPE):
                v = copy.deepcopy(v, memo)
            out[k] = v
        return out
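# Sketch: SON preserves insertion order, which plain dicts only guarantee
# from CPython 3.7 on; order matters for some MongoDB command documents.
from bson.son import SON

cmd = SON([("createIndexes", "users"), ("indexes", [])])
cmd["writeConcern"] = {"w": 1}
print(list(cmd.keys()))   # ['createIndexes', 'indexes', 'writeConcern']
print(cmd.to_dict())      # recursive conversion back to plain dicts
print(cmd == dict(cmd))   # True: comparison with a plain dict ignores order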
@ -1,120 +0,0 @@
# Copyright 2010-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tools for representing MongoDB internal Timestamps.
"""

import calendar
import datetime

from bson.py3compat import integer_types
from bson.tz_util import utc

UPPERBOUND = 4294967296


class Timestamp(object):
    """MongoDB internal timestamps used in the opLog.
    """

    _type_marker = 17

    def __init__(self, time, inc):
        """Create a new :class:`Timestamp`.

        This class is only for use with the MongoDB opLog. If you need
        to store a regular timestamp, please use a
        :class:`~datetime.datetime`.

        Raises :class:`TypeError` if `time` is not an instance of
        :class:`int` or :class:`~datetime.datetime`, or `inc` is not
        an instance of :class:`int`. Raises :class:`ValueError` if
        `time` or `inc` is not in [0, 2**32).

        :Parameters:
          - `time`: time in seconds since epoch UTC, or a naive UTC
            :class:`~datetime.datetime`, or an aware
            :class:`~datetime.datetime`
          - `inc`: the incrementing counter
        """
        if isinstance(time, datetime.datetime):
            if time.utcoffset() is not None:
                time = time - time.utcoffset()
            time = int(calendar.timegm(time.timetuple()))
        if not isinstance(time, integer_types):
            raise TypeError("time must be an instance of int")
        if not isinstance(inc, integer_types):
            raise TypeError("inc must be an instance of int")
        if not 0 <= time < UPPERBOUND:
            raise ValueError("time must be contained in [0, 2**32)")
        if not 0 <= inc < UPPERBOUND:
            raise ValueError("inc must be contained in [0, 2**32)")

        self.__time = time
        self.__inc = inc

    @property
    def time(self):
        """Get the time portion of this :class:`Timestamp`.
        """
        return self.__time

    @property
    def inc(self):
        """Get the inc portion of this :class:`Timestamp`.
        """
        return self.__inc

    def __eq__(self, other):
        if isinstance(other, Timestamp):
            return (self.__time == other.time and self.__inc == other.inc)
        else:
            return NotImplemented

    def __hash__(self):
        return hash(self.time) ^ hash(self.inc)

    def __ne__(self, other):
        return not self == other

    def __lt__(self, other):
        if isinstance(other, Timestamp):
            return (self.time, self.inc) < (other.time, other.inc)
        return NotImplemented

    def __le__(self, other):
        if isinstance(other, Timestamp):
            return (self.time, self.inc) <= (other.time, other.inc)
        return NotImplemented

    def __gt__(self, other):
        if isinstance(other, Timestamp):
            return (self.time, self.inc) > (other.time, other.inc)
        return NotImplemented

    def __ge__(self, other):
        if isinstance(other, Timestamp):
            return (self.time, self.inc) >= (other.time, other.inc)
        return NotImplemented

    def __repr__(self):
        return "Timestamp(%s, %s)" % (self.__time, self.__inc)

    def as_datetime(self):
        """Return a :class:`~datetime.datetime` instance corresponding
        to the time portion of this :class:`Timestamp`.

        The returned datetime's timezone is UTC.
        """
        return datetime.datetime.fromtimestamp(self.__time, utc)
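# Sketch: Timestamps order by (time, inc), mirroring how the oplog orders
# entries that fall within the same second. Assumes `bson` is importable.
import datetime
from bson.timestamp import Timestamp

t1 = Timestamp(datetime.datetime(2020, 1, 1), 0)  # datetimes are converted
t2 = Timestamp(t1.time, t1.inc + 1)
print(t1 < t2)            # True: same second, higher counter
print(t1.as_datetime())   # back to a tz-aware UTC datetime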
@ -1,52 +0,0 @@
# Copyright 2010-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Timezone related utilities for BSON."""

from datetime import (timedelta,
                      tzinfo)

ZERO = timedelta(0)


class FixedOffset(tzinfo):
    """Fixed offset timezone, in minutes east from UTC.

    Implementation based from the Python `standard library documentation
    <http://docs.python.org/library/datetime.html#tzinfo-objects>`_.
    Defining __getinitargs__ enables pickling / copying.
    """

    def __init__(self, offset, name):
        if isinstance(offset, timedelta):
            self.__offset = offset
        else:
            self.__offset = timedelta(minutes=offset)
        self.__name = name

    def __getinitargs__(self):
        return self.__offset, self.__name

    def utcoffset(self, dt):
        return self.__offset

    def tzname(self, dt):
        return self.__name

    def dst(self, dt):
        return ZERO


utc = FixedOffset(0, "UTC")
"""Fixed offset timezone representing UTC."""
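# Sketch: FixedOffset gives bson a dependency-free tzinfo; `utc` above is
# the zone attached to every datetime the BSON decoder produces.
import datetime
from bson.tz_util import FixedOffset, utc

ist = FixedOffset(330, "IST")         # 5 h 30 min east of UTC
now = datetime.datetime.now(utc)
print(now.astimezone(ist).tzname())   # 'IST'
print(ist.utcoffset(now))             # 5:30:00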
@ -1,46 +0,0 @@
from .compat import IS_TYPE_CHECKING
from .main import load_dotenv, get_key, set_key, unset_key, find_dotenv, dotenv_values

if IS_TYPE_CHECKING:
    from typing import Any, Optional


def load_ipython_extension(ipython):
    # type: (Any) -> None
    from .ipython import load_ipython_extension
    load_ipython_extension(ipython)


def get_cli_string(path=None, action=None, key=None, value=None, quote=None):
    # type: (Optional[str], Optional[str], Optional[str], Optional[str], Optional[str]) -> str
    """Returns a string suitable for running as a shell script.

    Useful for converting arguments passed to a fabric task
    to be passed to a `local` or `run` command.
    """
    command = ['dotenv']
    if quote:
        command.append('-q %s' % quote)
    if path:
        command.append('-f %s' % path)
    if action:
        command.append(action)
        if key:
            command.append(key)
            if value:
                if ' ' in value:
                    command.append('"%s"' % value)
                else:
                    command.append(value)

    return ' '.join(command).strip()


__all__ = ['get_cli_string',
           'load_dotenv',
           'dotenv_values',
           'get_key',
           'set_key',
           'unset_key',
           'find_dotenv',
           'load_ipython_extension']
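# Sketch: get_cli_string only assembles the shell command; it never runs it.
from dotenv import get_cli_string

print(get_cli_string(path=".env", action="set", key="DEBUG", value="true"))
# -> dotenv -f .env set DEBUG true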
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1,145 +0,0 @@
import os
import sys
from subprocess import Popen

try:
    import click
except ImportError:
    sys.stderr.write('It seems python-dotenv is not installed with cli option.\n'
                     'Run pip install "python-dotenv[cli]" to fix this.')
    sys.exit(1)

from .compat import IS_TYPE_CHECKING, to_env
from .main import dotenv_values, get_key, set_key, unset_key
from .version import __version__

if IS_TYPE_CHECKING:
    from typing import Any, List, Dict


@click.group()
@click.option('-f', '--file', default=os.path.join(os.getcwd(), '.env'),
              type=click.Path(exists=True),
              help="Location of the .env file, defaults to .env file in current working directory.")
@click.option('-q', '--quote', default='always',
              type=click.Choice(['always', 'never', 'auto']),
              help="Whether or not to quote the variable values. Default mode is always. This does not affect parsing.")
@click.version_option(version=__version__)
@click.pass_context
def cli(ctx, file, quote):
    # type: (click.Context, Any, Any) -> None
    '''This script is used to set, get or unset values from a .env file.'''
    ctx.obj = {}
    ctx.obj['FILE'] = file
    ctx.obj['QUOTE'] = quote


@cli.command()
@click.pass_context
def list(ctx):
    # type: (click.Context) -> None
    '''Display all the stored key/value pairs.'''
    file = ctx.obj['FILE']
    dotenv_as_dict = dotenv_values(file)
    for k, v in dotenv_as_dict.items():
        click.echo('%s=%s' % (k, v))


@cli.command()
@click.pass_context
@click.argument('key', required=True)
@click.argument('value', required=True)
def set(ctx, key, value):
    # type: (click.Context, Any, Any) -> None
    '''Store the given key/value.'''
    file = ctx.obj['FILE']
    quote = ctx.obj['QUOTE']
    success, key, value = set_key(file, key, value, quote)
    if success:
        click.echo('%s=%s' % (key, value))
    else:
        exit(1)


@cli.command()
@click.pass_context
@click.argument('key', required=True)
def get(ctx, key):
    # type: (click.Context, Any) -> None
    '''Retrieve the value for the given key.'''
    file = ctx.obj['FILE']
    stored_value = get_key(file, key)
    if stored_value:
        click.echo('%s=%s' % (key, stored_value))
    else:
        exit(1)


@cli.command()
@click.pass_context
@click.argument('key', required=True)
def unset(ctx, key):
    # type: (click.Context, Any) -> None
    '''Removes the given key.'''
    file = ctx.obj['FILE']
    quote = ctx.obj['QUOTE']
    success, key = unset_key(file, key, quote)
    if success:
        click.echo("Successfully removed %s" % key)
    else:
        exit(1)


@cli.command(context_settings={'ignore_unknown_options': True})
@click.pass_context
@click.argument('commandline', nargs=-1, type=click.UNPROCESSED)
def run(ctx, commandline):
    # type: (click.Context, List[str]) -> None
    """Run command with environment variables present."""
    file = ctx.obj['FILE']
    dotenv_as_dict = {to_env(k): to_env(v) for (k, v) in dotenv_values(file).items() if v is not None}

    if not commandline:
        click.echo('No command given.')
        exit(1)
    ret = run_command(commandline, dotenv_as_dict)
    exit(ret)


def run_command(command, env):
    # type: (List[str], Dict[str, str]) -> int
    """Run command in sub process.

    Runs the command in a sub process with the variables from `env`
    added in the current environment variables.

    Parameters
    ----------
    command: List[str]
        The command and its parameters
    env: Dict
        The additional environment variables

    Returns
    -------
    int
        The return code of the command

    """
    # copy the current environment variables and add the values from
    # `env`
    cmd_env = os.environ.copy()
    cmd_env.update(env)

    p = Popen(command,
              universal_newlines=True,
              bufsize=0,
              shell=False,
              env=cmd_env)
    _, _ = p.communicate()

    return p.returncode


if __name__ == "__main__":
    cli()
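# Sketch: run_command overlays the given mapping on os.environ for the child
# process only; the parent environment is untouched. Importing dotenv.cli
# assumes click is installed (the python-dotenv "cli" extra).
import sys
from dotenv.cli import run_command

code = run_command(
    [sys.executable, "-c", "import os; print(os.environ['GREETING'])"],
    {"GREETING": "hello"},
)
print(code)  # 0 on success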
@ -1,49 +0,0 @@
import sys

PY2 = sys.version_info[0] == 2  # type: bool

if PY2:
    from StringIO import StringIO  # noqa
else:
    from io import StringIO  # noqa


def is_type_checking():
    # type: () -> bool
    try:
        from typing import TYPE_CHECKING
    except ImportError:
        return False
    return TYPE_CHECKING


IS_TYPE_CHECKING = is_type_checking()


if IS_TYPE_CHECKING:
    from typing import Text


def to_env(text):
    # type: (Text) -> str
    """
    Encode a string the same way whether it comes from the environment or a `.env` file.
    """
    if PY2:
        return text.encode(sys.getfilesystemencoding() or "utf-8")
    else:
        return text


def to_text(string):
    # type: (str) -> Text
    """
    Make a string Unicode if it isn't already.

    This is useful for defining raw unicode strings because `ur"foo"` isn't valid in
    Python 3.
    """
    if PY2:
        return string.decode("utf-8")
    else:
        return string
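# Sketch: to_env/to_text normalize between bytes and text so one code path
# serves both majors; on Python 3 they are effectively identity functions.
from dotenv.compat import PY2, to_env, to_text

print(repr(to_env(u"VALUE")))  # str on both majors (filesystem-encoded on 2.x)
print(repr(to_text("VALUE")))  # text on both majors
print(PY2)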
@ -1,41 +0,0 @@
from __future__ import print_function

from IPython.core.magic import Magics, line_magic, magics_class  # type: ignore
from IPython.core.magic_arguments import (argument, magic_arguments,  # type: ignore
                                          parse_argstring)  # type: ignore

from .main import find_dotenv, load_dotenv


@magics_class
class IPythonDotEnv(Magics):

    @magic_arguments()
    @argument(
        '-o', '--override', action='store_true',
        help="Indicate to override existing variables"
    )
    @argument(
        '-v', '--verbose', action='store_true',
        help="Indicate function calls to be verbose"
    )
    @argument('dotenv_path', nargs='?', type=str, default='.env',
              help='Search in increasingly higher folders for the `dotenv_path`')
    @line_magic
    def dotenv(self, line):
        args = parse_argstring(self.dotenv, line)
        # Locate the .env file
        dotenv_path = args.dotenv_path
        try:
            dotenv_path = find_dotenv(dotenv_path, True, True)
        except IOError:
            print("cannot find .env file")
            return

        # Load the .env file
        load_dotenv(dotenv_path, verbose=args.verbose, override=args.override)


def load_ipython_extension(ipython):
    """Register the %dotenv magic."""
    ipython.register_magics(IPythonDotEnv)
@ -1,323 +0,0 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals

import io
import logging
import os
import re
import shutil
import sys
import tempfile
from collections import OrderedDict
from contextlib import contextmanager

from .compat import IS_TYPE_CHECKING, PY2, StringIO, to_env
from .parser import Binding, parse_stream

logger = logging.getLogger(__name__)

if IS_TYPE_CHECKING:
    from typing import (
        Dict, Iterator, Match, Optional, Pattern, Union, Text, IO, Tuple
    )
    if sys.version_info >= (3, 6):
        _PathLike = os.PathLike
    else:
        _PathLike = Text

    if sys.version_info >= (3, 0):
        _StringIO = StringIO
    else:
        _StringIO = StringIO[Text]

__posix_variable = re.compile(
    r"""
    \$\{
        (?P<name>[^\}:]*)
        (?::-
            (?P<default>[^\}]*)
        )?
    \}
    """,
    re.VERBOSE,
)  # type: Pattern[Text]


def with_warn_for_invalid_lines(mappings):
    # type: (Iterator[Binding]) -> Iterator[Binding]
    for mapping in mappings:
        if mapping.error:
            logger.warning(
                "Python-dotenv could not parse statement starting at line %s",
                mapping.original.line,
            )
        yield mapping


class DotEnv(object):

    def __init__(self, dotenv_path, verbose=False, encoding=None, interpolate=True):
        # type: (Union[Text, _PathLike, _StringIO], bool, Union[None, Text], bool) -> None
        self.dotenv_path = dotenv_path  # type: Union[Text, _PathLike, _StringIO]
        self._dict = None  # type: Optional[Dict[Text, Optional[Text]]]
        self.verbose = verbose  # type: bool
        self.encoding = encoding  # type: Union[None, Text]
        self.interpolate = interpolate  # type: bool

    @contextmanager
    def _get_stream(self):
        # type: () -> Iterator[IO[Text]]
        if isinstance(self.dotenv_path, StringIO):
            yield self.dotenv_path
        elif os.path.isfile(self.dotenv_path):
            with io.open(self.dotenv_path, encoding=self.encoding) as stream:
                yield stream
        else:
            if self.verbose:
                logger.warning("File doesn't exist %s", self.dotenv_path)
            yield StringIO('')

    def dict(self):
        # type: () -> Dict[Text, Optional[Text]]
        """Return dotenv as dict"""
        if self._dict:
            return self._dict

        values = OrderedDict(self.parse())
        self._dict = resolve_nested_variables(values) if self.interpolate else values
        return self._dict

    def parse(self):
        # type: () -> Iterator[Tuple[Text, Optional[Text]]]
        with self._get_stream() as stream:
            for mapping in with_warn_for_invalid_lines(parse_stream(stream)):
                if mapping.key is not None:
                    yield mapping.key, mapping.value

    def set_as_environment_variables(self, override=False):
        # type: (bool) -> bool
        """
        Load the current dotenv as system environment variables.
        """
        for k, v in self.dict().items():
            if k in os.environ and not override:
                continue
            if v is not None:
                os.environ[to_env(k)] = to_env(v)

        return True

    def get(self, key):
        # type: (Text) -> Optional[Text]
        """
        Return the value of the given key, or None if it is not set.
        """
        data = self.dict()

        if key in data:
            return data[key]

        if self.verbose:
            logger.warning("Key %s not found in %s.", key, self.dotenv_path)

        return None


def get_key(dotenv_path, key_to_get):
    # type: (Union[Text, _PathLike], Text) -> Optional[Text]
    """
    Gets the value of a given key from the given .env

    If the .env path given doesn't exist, fails
    """
    return DotEnv(dotenv_path, verbose=True).get(key_to_get)


@contextmanager
def rewrite(path):
    # type: (_PathLike) -> Iterator[Tuple[IO[Text], IO[Text]]]
    try:
        with tempfile.NamedTemporaryFile(mode="w+", delete=False) as dest:
            with io.open(path) as source:
                yield (source, dest)  # type: ignore
    except BaseException:
        if os.path.isfile(dest.name):
            os.unlink(dest.name)
        raise
    else:
        shutil.move(dest.name, path)


def set_key(dotenv_path, key_to_set, value_to_set, quote_mode="always"):
    # type: (_PathLike, Text, Text, Text) -> Tuple[Optional[bool], Text, Text]
    """
    Adds or Updates a key/value to the given .env

    If the .env path given doesn't exist, fails instead of risking creating
    an orphan .env somewhere in the filesystem
    """
    value_to_set = value_to_set.strip("'").strip('"')
    if not os.path.exists(dotenv_path):
        logger.warning("Can't write to %s - it doesn't exist.", dotenv_path)
        return None, key_to_set, value_to_set

    if " " in value_to_set:
        quote_mode = "always"

    if quote_mode == "always":
        value_out = '"{}"'.format(value_to_set.replace('"', '\\"'))
    else:
        value_out = value_to_set
    line_out = "{}={}\n".format(key_to_set, value_out)

    with rewrite(dotenv_path) as (source, dest):
        replaced = False
        for mapping in with_warn_for_invalid_lines(parse_stream(source)):
            if mapping.key == key_to_set:
                dest.write(line_out)
                replaced = True
            else:
                dest.write(mapping.original.string)
        if not replaced:
            dest.write(line_out)

    return True, key_to_set, value_to_set


def unset_key(dotenv_path, key_to_unset, quote_mode="always"):
    # type: (_PathLike, Text, Text) -> Tuple[Optional[bool], Text]
    """
    Removes a given key from the given .env

    If the .env path given doesn't exist, fails
    If the given key doesn't exist in the .env, fails
    """
    if not os.path.exists(dotenv_path):
        logger.warning("Can't delete from %s - it doesn't exist.", dotenv_path)
        return None, key_to_unset

    removed = False
    with rewrite(dotenv_path) as (source, dest):
        for mapping in with_warn_for_invalid_lines(parse_stream(source)):
            if mapping.key == key_to_unset:
                removed = True
            else:
                dest.write(mapping.original.string)

    if not removed:
        logger.warning("Key %s not removed from %s - key doesn't exist.", key_to_unset, dotenv_path)
        return None, key_to_unset

    return removed, key_to_unset


def resolve_nested_variables(values):
    # type: (Dict[Text, Optional[Text]]) -> Dict[Text, Optional[Text]]
    def _replacement(name, default):
        # type: (Text, Optional[Text]) -> Text
        """
        Get the appropriate value for a variable name:
        search os.environ first and, if not found,
        fall back to the other dotenv variables.
        """
        default = default if default is not None else ""
        ret = os.getenv(name, new_values.get(name, default))
        return ret  # type: ignore

    def _re_sub_callback(match):
        # type: (Match[Text]) -> Text
        """
        From a match object, get the variable name and return
        the correct replacement.
        """
        matches = match.groupdict()
        return _replacement(name=matches["name"], default=matches["default"])  # type: ignore

    new_values = {}

    for k, v in values.items():
        new_values[k] = __posix_variable.sub(_re_sub_callback, v) if v is not None else None

    return new_values


def _walk_to_root(path):
    # type: (Text) -> Iterator[Text]
    """
    Yield directories starting from the given directory up to the root
    """
    if not os.path.exists(path):
        raise IOError('Starting path not found')

    if os.path.isfile(path):
        path = os.path.dirname(path)

    last_dir = None
    current_dir = os.path.abspath(path)
    while last_dir != current_dir:
        yield current_dir
        parent_dir = os.path.abspath(os.path.join(current_dir, os.path.pardir))
        last_dir, current_dir = current_dir, parent_dir


def find_dotenv(filename='.env', raise_error_if_not_found=False, usecwd=False):
    # type: (Text, bool, bool) -> Text
    """
    Search in increasingly higher folders for the given file

    Returns path to the file if found, or an empty string otherwise
    """

    def _is_interactive():
        """ Decide whether this is running in a REPL or IPython notebook """
        main = __import__('__main__', None, None, fromlist=['__file__'])
        return not hasattr(main, '__file__')

    if usecwd or _is_interactive() or getattr(sys, 'frozen', False):
        # Should work without __file__, e.g. in REPL or IPython notebook.
        path = os.getcwd()
    else:
        # will work for .py files
        frame = sys._getframe()
        # find first frame that is outside of this file
        if PY2 and not __file__.endswith('.py'):
            # in Python2 __file__ extension could be .pyc or .pyo (this doesn't account
            # for edge case of Python compiled for non-standard extension)
            current_file = __file__.rsplit('.', 1)[0] + '.py'
        else:
            current_file = __file__

        while frame.f_code.co_filename == current_file:
            assert frame.f_back is not None
            frame = frame.f_back
        frame_filename = frame.f_code.co_filename
        path = os.path.dirname(os.path.abspath(frame_filename))

    for dirname in _walk_to_root(path):
        check_path = os.path.join(dirname, filename)
        if os.path.isfile(check_path):
            return check_path

    if raise_error_if_not_found:
        raise IOError('File not found')

    return ''


def load_dotenv(dotenv_path=None, stream=None, verbose=False, override=False, interpolate=True, **kwargs):
    # type: (Union[Text, _PathLike, None], Optional[_StringIO], bool, bool, bool, Union[None, Text]) -> bool
    """Parse a .env file and then load all the variables found as environment variables.

    - *dotenv_path*: absolute or relative path to .env file.
    - *stream*: `StringIO` object with .env content.
    - *verbose*: whether to output the warnings related to missing .env file etc. Defaults to `False`.
    - *override*: whether to override the system environment variables with the variables in `.env` file.
      Defaults to `False`.
    """
    f = dotenv_path or stream or find_dotenv()
    return DotEnv(f, verbose=verbose, interpolate=interpolate, **kwargs).set_as_environment_variables(override=override)


def dotenv_values(dotenv_path=None, stream=None, verbose=False, interpolate=True, **kwargs):
    # type: (Union[Text, _PathLike, None], Optional[_StringIO], bool, bool, Union[None, Text]) -> Dict[Text, Optional[Text]]  # noqa: E501
    f = dotenv_path or stream or find_dotenv()
    return DotEnv(f, verbose=verbose, interpolate=interpolate, **kwargs).dict()
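# Sketch: dotenv_values driven from a StringIO so no file is needed. The
# ${HOST} reference is resolved by resolve_nested_variables above (an
# inherited os.environ entry named HOST would take precedence).
from io import StringIO
from dotenv.main import dotenv_values

stream = StringIO(u"HOST=db\nURL=postgres://${HOST}:5432\n")
print(dotenv_values(stream=stream))
# OrderedDict([('HOST', 'db'), ('URL', 'postgres://db:5432')])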
@ -1,237 +0,0 @@
import codecs
import re

from .compat import IS_TYPE_CHECKING, to_text

if IS_TYPE_CHECKING:
    from typing import (  # noqa:F401
        IO, Iterator, Match, NamedTuple, Optional, Pattern, Sequence, Text,
        Tuple
    )


def make_regex(string, extra_flags=0):
    # type: (str, int) -> Pattern[Text]
    return re.compile(to_text(string), re.UNICODE | extra_flags)


_newline = make_regex(r"(\r\n|\n|\r)")
_multiline_whitespace = make_regex(r"\s*", extra_flags=re.MULTILINE)
_whitespace = make_regex(r"[^\S\r\n]*")
_export = make_regex(r"(?:export[^\S\r\n]+)?")
_single_quoted_key = make_regex(r"'([^']+)'")
_unquoted_key = make_regex(r"([^=\#\s]+)")
_equal_sign = make_regex(r"(=[^\S\r\n]*)")
_single_quoted_value = make_regex(r"'((?:\\'|[^'])*)'")
_double_quoted_value = make_regex(r'"((?:\\"|[^"])*)"')
_unquoted_value_part = make_regex(r"([^ \r\n]*)")
_comment = make_regex(r"(?:[^\S\r\n]*#[^\r\n]*)?")
_end_of_line = make_regex(r"[^\S\r\n]*(?:\r\n|\n|\r|$)")
_rest_of_line = make_regex(r"[^\r\n]*(?:\r|\n|\r\n)?")
_double_quote_escapes = make_regex(r"\\[\\'\"abfnrtv]")
_single_quote_escapes = make_regex(r"\\[\\']")


try:
    # this is necessary because we only import these from typing
    # when we are type checking, and the linter is upset if we
    # re-import
    import typing

    Original = typing.NamedTuple(
        "Original",
        [
            ("string", typing.Text),
            ("line", int),
        ],
    )

    Binding = typing.NamedTuple(
        "Binding",
        [
            ("key", typing.Optional[typing.Text]),
            ("value", typing.Optional[typing.Text]),
            ("original", Original),
            ("error", bool),
        ],
    )
except ImportError:
    from collections import namedtuple
    Original = namedtuple(  # type: ignore
        "Original",
        [
            "string",
            "line",
        ],
    )
    Binding = namedtuple(  # type: ignore
        "Binding",
        [
            "key",
            "value",
            "original",
            "error",
        ],
    )


class Position:
    def __init__(self, chars, line):
        # type: (int, int) -> None
        self.chars = chars
        self.line = line

    @classmethod
    def start(cls):
        # type: () -> Position
        return cls(chars=0, line=1)

    def set(self, other):
        # type: (Position) -> None
        self.chars = other.chars
        self.line = other.line

    def advance(self, string):
        # type: (Text) -> None
        self.chars += len(string)
        self.line += len(re.findall(_newline, string))


class Error(Exception):
    pass


class Reader:
    def __init__(self, stream):
        # type: (IO[Text]) -> None
        self.string = stream.read()
        self.position = Position.start()
        self.mark = Position.start()

    def has_next(self):
        # type: () -> bool
        return self.position.chars < len(self.string)

    def set_mark(self):
        # type: () -> None
        self.mark.set(self.position)

    def get_marked(self):
        # type: () -> Original
        return Original(
            string=self.string[self.mark.chars:self.position.chars],
            line=self.mark.line,
        )

    def peek(self, count):
        # type: (int) -> Text
        return self.string[self.position.chars:self.position.chars + count]

    def read(self, count):
        # type: (int) -> Text
        result = self.string[self.position.chars:self.position.chars + count]
        if len(result) < count:
            raise Error("read: End of string")
        self.position.advance(result)
        return result

    def read_regex(self, regex):
        # type: (Pattern[Text]) -> Sequence[Text]
        match = regex.match(self.string, self.position.chars)
        if match is None:
            raise Error("read_regex: Pattern not found")
        self.position.advance(self.string[match.start():match.end()])
        return match.groups()


def decode_escapes(regex, string):
    # type: (Pattern[Text], Text) -> Text
    def decode_match(match):
        # type: (Match[Text]) -> Text
        return codecs.decode(match.group(0), 'unicode-escape')  # type: ignore

    return regex.sub(decode_match, string)


def parse_key(reader):
    # type: (Reader) -> Optional[Text]
    char = reader.peek(1)
    if char == "#":
        return None
    elif char == "'":
        (key,) = reader.read_regex(_single_quoted_key)
    else:
        (key,) = reader.read_regex(_unquoted_key)
    return key


def parse_unquoted_value(reader):
    # type: (Reader) -> Text
    value = u""
    while True:
        (part,) = reader.read_regex(_unquoted_value_part)
        value += part
        after = reader.peek(2)
        if len(after) < 2 or after[0] in u"\r\n" or after[1] in u" #\r\n":
            return value
        value += reader.read(2)


def parse_value(reader):
    # type: (Reader) -> Text
    char = reader.peek(1)
    if char == u"'":
        (value,) = reader.read_regex(_single_quoted_value)
        return decode_escapes(_single_quote_escapes, value)
    elif char == u'"':
        (value,) = reader.read_regex(_double_quoted_value)
        return decode_escapes(_double_quote_escapes, value)
    elif char in (u"", u"\n", u"\r"):
        return u""
    else:
        return parse_unquoted_value(reader)


def parse_binding(reader):
    # type: (Reader) -> Binding
    reader.set_mark()
    try:
        reader.read_regex(_multiline_whitespace)
        if not reader.has_next():
            return Binding(
                key=None,
                value=None,
                original=reader.get_marked(),
                error=False,
            )
        reader.read_regex(_export)
        key = parse_key(reader)
        reader.read_regex(_whitespace)
        if reader.peek(1) == "=":
            reader.read_regex(_equal_sign)
            value = parse_value(reader)  # type: Optional[Text]
        else:
            value = None
        reader.read_regex(_comment)
        reader.read_regex(_end_of_line)
        return Binding(
            key=key,
            value=value,
            original=reader.get_marked(),
            error=False,
        )
    except Error:
        reader.read_regex(_rest_of_line)
        return Binding(
            key=None,
            value=None,
            original=reader.get_marked(),
            error=True,
        )


def parse_stream(stream):
    # type: (IO[Text]) -> Iterator[Binding]
    reader = Reader(stream)
    while reader.has_next():
        yield parse_binding(reader)
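# Sketch: parse_stream yields one Binding per statement and flags lines it
# cannot parse (error=True) instead of raising.
from io import StringIO
from dotenv.parser import parse_stream

source = StringIO(u"export A=1\nB='two' # comment\n=oops\n")
for binding in parse_stream(source):
    print(binding.key, repr(binding.value), binding.error)
# A '1' False
# B 'two' False
# None None True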
@ -1 +0,0 @@
# Marker file for PEP 561
@ -1 +0,0 @@
__version__ = "0.13.0"
@ -1,5 +0,0 @@
"""Run the EasyInstall command"""

if __name__ == '__main__':
    from setuptools.command.easy_install import main
    main()
@ -1,930 +0,0 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""GridFS is a specification for storing large objects in Mongo.

The :mod:`gridfs` package is an implementation of GridFS on top of
:mod:`pymongo`, exposing a file-like interface.

.. mongodoc:: gridfs
"""

from bson.py3compat import abc
from gridfs.errors import NoFile
from gridfs.grid_file import (GridIn,
                              GridOut,
                              GridOutCursor,
                              DEFAULT_CHUNK_SIZE,
                              _clear_entity_type_registry)
from pymongo import (ASCENDING,
                     DESCENDING)
from pymongo.common import UNAUTHORIZED_CODES, validate_string
from pymongo.database import Database
from pymongo.errors import ConfigurationError, OperationFailure


class GridFS(object):
    """An instance of GridFS on top of a single Database.
    """
    def __init__(self, database, collection="fs", disable_md5=False):
        """Create a new instance of :class:`GridFS`.

        Raises :class:`TypeError` if `database` is not an instance of
        :class:`~pymongo.database.Database`.

        :Parameters:
          - `database`: database to use
          - `collection` (optional): root collection to use
          - `disable_md5` (optional): When True, MD5 checksums will not be
            computed for uploaded files. Useful in environments where MD5
            cannot be used for regulatory or other reasons. Defaults to False.

        .. versionchanged:: 3.1
           Indexes are only ensured on the first write to the DB.

        .. versionchanged:: 3.0
           `database` must use an acknowledged
           :attr:`~pymongo.database.Database.write_concern`

        .. mongodoc:: gridfs
        """
        if not isinstance(database, Database):
            raise TypeError("database must be an instance of Database")

        database = _clear_entity_type_registry(database)

        if not database.write_concern.acknowledged:
            raise ConfigurationError('database must use '
                                     'acknowledged write_concern')

        self.__database = database
        self.__collection = database[collection]
        self.__files = self.__collection.files
        self.__chunks = self.__collection.chunks
        self.__disable_md5 = disable_md5

    def new_file(self, **kwargs):
        """Create a new file in GridFS.

        Returns a new :class:`~gridfs.grid_file.GridIn` instance to
        which data can be written. Any keyword arguments will be
        passed through to :meth:`~gridfs.grid_file.GridIn`.

        If the ``"_id"`` of the file is manually specified, it must
        not already exist in GridFS. Otherwise
        :class:`~gridfs.errors.FileExists` is raised.

        :Parameters:
          - `**kwargs` (optional): keyword arguments for file creation
        """
        # No need for __ensure_index_files_id() here; GridIn ensures
        # the (files_id, n) index when needed.
        return GridIn(
            self.__collection, disable_md5=self.__disable_md5, **kwargs)

    def put(self, data, **kwargs):
        """Put data in GridFS as a new file.

        Equivalent to doing::

          try:
              f = new_file(**kwargs)
              f.write(data)
          finally:
              f.close()

        `data` can be either an instance of :class:`str` (:class:`bytes`
        in python 3) or a file-like object providing a :meth:`read` method.
        If an `encoding` keyword argument is passed, `data` can also be a
        :class:`unicode` (:class:`str` in python 3) instance, which will
        be encoded as `encoding` before being written. Any keyword arguments
        will be passed through to the created file - see
        :meth:`~gridfs.grid_file.GridIn` for possible arguments. Returns the
        ``"_id"`` of the created file.

        If the ``"_id"`` of the file is manually specified, it must
        not already exist in GridFS. Otherwise
        :class:`~gridfs.errors.FileExists` is raised.

        :Parameters:
          - `data`: data to be written as a file.
          - `**kwargs` (optional): keyword arguments for file creation

        .. versionchanged:: 3.0
           w=0 writes to GridFS are now prohibited.
        """
        grid_file = GridIn(
            self.__collection, disable_md5=self.__disable_md5, **kwargs)
        try:
            grid_file.write(data)
        finally:
            grid_file.close()

        return grid_file._id
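
    # Usage sketch (not part of the original module; hypothetical names,
    # assumes a reachable mongod): put() accepts bytes or a file-like object
    # and returns the new file's _id; get() reads it back.
    #
    #     from pymongo import MongoClient
    #     import gridfs
    #
    #     fs = gridfs.GridFS(MongoClient().my_database)
    #     file_id = fs.put(b"hello gridfs", filename="greeting.txt")
    #     assert fs.get(file_id).read() == b"hello gridfs"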

    def get(self, file_id, session=None):
        """Get a file from GridFS by ``"_id"``.

        Returns an instance of :class:`~gridfs.grid_file.GridOut`,
        which provides a file-like interface for reading.

        :Parameters:
          - `file_id`: ``"_id"`` of the file to get
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        gout = GridOut(self.__collection, file_id, session=session)

        # Raise NoFile now, instead of on first attribute access.
        gout._ensure_file()
        return gout

    def get_version(self, filename=None, version=-1, session=None, **kwargs):
        """Get a file from GridFS by ``"filename"`` or metadata fields.

        Returns a version of the file in GridFS whose filename matches
        `filename` and whose metadata fields match the supplied keyword
        arguments, as an instance of :class:`~gridfs.grid_file.GridOut`.

        Version numbering is a convenience atop the GridFS API provided
        by MongoDB. If more than one file matches the query (either by
        `filename` alone, by metadata fields, or by a combination of
        both), then version ``-1`` will be the most recently uploaded
        matching file, ``-2`` the second most recently
        uploaded, etc. Version ``0`` will be the first version
        uploaded, ``1`` the second version, etc. So if three versions
        have been uploaded, then version ``0`` is the same as version
        ``-3``, version ``1`` is the same as version ``-2``, and
        version ``2`` is the same as version ``-1``.

        Raises :class:`~gridfs.errors.NoFile` if no such version of
        that file exists.

        :Parameters:
          - `filename`: ``"filename"`` of the file to get, or `None`
          - `version` (optional): version of the file to get (defaults
            to -1, the most recent version uploaded)
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`
          - `**kwargs` (optional): find files by custom metadata.

        .. versionchanged:: 3.6
           Added ``session`` parameter.

        .. versionchanged:: 3.1
           ``get_version`` no longer ensures indexes.
        """
        query = kwargs
        if filename is not None:
            query["filename"] = filename

        cursor = self.__files.find(query, session=session)
        if version < 0:
            skip = abs(version) - 1
            cursor.limit(-1).skip(skip).sort("uploadDate", DESCENDING)
        else:
            cursor.limit(-1).skip(version).sort("uploadDate", ASCENDING)
        try:
            doc = next(cursor)
            return GridOut(
                self.__collection, file_document=doc, session=session)
        except StopIteration:
            raise NoFile("no version %d for filename %r" % (version, filename))
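
    # Note (not part of the original module): version indexing sketch. With
    # three uploads of "report.txt" (hypothetical setup), these pairs address
    # the same stored document:
    #
    #     fs.get_version("report.txt", 0)     # oldest upload, same as -3
    #     fs.get_version("report.txt", -1)    # newest upload, same as 2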

    def get_last_version(self, filename=None, session=None, **kwargs):
        """Get the most recent version of a file in GridFS by ``"filename"``
        or metadata fields.

        Equivalent to calling :meth:`get_version` with the default
        `version` (``-1``).

        :Parameters:
          - `filename`: ``"filename"`` of the file to get, or `None`
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`
          - `**kwargs` (optional): find files by custom metadata.

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        return self.get_version(filename=filename, session=session, **kwargs)

    # TODO add optional safe mode for chunk removal?
    def delete(self, file_id, session=None):
        """Delete a file from GridFS by ``"_id"``.

        Deletes all data belonging to the file with ``"_id"``:
        `file_id`.

        .. warning:: Any processes/threads reading from the file while
           this method is executing will likely see an invalid/corrupt
           file. Care should be taken to avoid concurrent reads to a file
           while it is being deleted.

        .. note:: Deletes of non-existent files are considered successful
           since the end result is the same: no file with that _id remains.

        :Parameters:
          - `file_id`: ``"_id"`` of the file to delete
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.

        .. versionchanged:: 3.1
           ``delete`` no longer ensures indexes.
        """
        self.__files.delete_one({"_id": file_id}, session=session)
        self.__chunks.delete_many({"files_id": file_id}, session=session)

    def list(self, session=None):
        """List the names of all files stored in this instance of
        :class:`GridFS`.

        :Parameters:
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.

        .. versionchanged:: 3.1
           ``list`` no longer ensures indexes.
        """
        # With an index, distinct includes documents with no filename
        # as None.
        return [
            name for name in self.__files.distinct("filename", session=session)
            if name is not None]

    def find_one(self, filter=None, session=None, *args, **kwargs):
        """Get a single file from gridfs.

        All arguments to :meth:`find` are also valid arguments for
        :meth:`find_one`, although any `limit` argument will be
        ignored. Returns a single :class:`~gridfs.grid_file.GridOut`,
        or ``None`` if no matching file is found. For example::

            file = fs.find_one({"filename": "lisa.txt"})

        :Parameters:
          - `filter` (optional): a dictionary specifying
            the query to be performed OR any other type to be used as
            the value for a query for ``"_id"`` in the file collection.
          - `*args` (optional): any additional positional arguments are
            the same as the arguments to :meth:`find`.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`
          - `**kwargs` (optional): any additional keyword arguments
            are the same as the arguments to :meth:`find`.

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        if filter is not None and not isinstance(filter, abc.Mapping):
            filter = {"_id": filter}

        for f in self.find(filter, *args, session=session, **kwargs):
            return f

        return None

    def find(self, *args, **kwargs):
        """Query GridFS for files.

        Returns a cursor that iterates across files matching
        arbitrary queries on the files collection. Can be combined
        with other modifiers for additional control. For example::

          for grid_out in fs.find({"filename": "lisa.txt"},
                                  no_cursor_timeout=True):
              data = grid_out.read()

        would iterate through all versions of "lisa.txt" stored in GridFS.
        Note that setting no_cursor_timeout to True may be important to
        prevent the cursor from timing out during long multi-file processing
        work.

        As another example, the call::

          most_recent_three = fs.find().sort("uploadDate", -1).limit(3)

        would return a cursor to the three most recently uploaded files
        in GridFS.

        Follows a similar interface to
        :meth:`~pymongo.collection.Collection.find`
        in :class:`~pymongo.collection.Collection`.

        If a :class:`~pymongo.client_session.ClientSession` is passed to
        :meth:`find`, all returned :class:`~gridfs.grid_file.GridOut` instances
        are associated with that session.

        :Parameters:
          - `filter` (optional): a SON object specifying elements which
            must be present for a document to be included in the
            result set
          - `skip` (optional): the number of files to omit (from
            the start of the result set) when returning the results
          - `limit` (optional): the maximum number of results to
            return
          - `no_cursor_timeout` (optional): if False (the default), any
            returned cursor is closed by the server after 10 minutes of
            inactivity. If set to True, the returned cursor will never
            time out on the server. Care should be taken to ensure that
            cursors with no_cursor_timeout turned on are properly closed.
          - `sort` (optional): a list of (key, direction) pairs
            specifying the sort order for this query. See
            :meth:`~pymongo.cursor.Cursor.sort` for details.

        Raises :class:`TypeError` if any of the arguments are of
        improper type. Returns an instance of
        :class:`~gridfs.grid_file.GridOutCursor`
        corresponding to this query.

        .. versionchanged:: 3.0
           Removed the read_preference, tag_sets, and
           secondary_acceptable_latency_ms options.
        .. versionadded:: 2.7
        .. mongodoc:: find
        """
        return GridOutCursor(self.__collection, *args, **kwargs)

    def exists(self, document_or_id=None, session=None, **kwargs):
        """Check if a file exists in this instance of :class:`GridFS`.

        The file to check for can be specified by the value of its
        ``_id`` key, or by passing in a query document. A query
        document can be passed in as dictionary, or by using keyword
        arguments. Thus, the following three calls are equivalent:

        >>> fs.exists(file_id)
        >>> fs.exists({"_id": file_id})
        >>> fs.exists(_id=file_id)

        As are the following two calls:

        >>> fs.exists({"filename": "mike.txt"})
        >>> fs.exists(filename="mike.txt")

        And the following two:

        >>> fs.exists({"foo": {"$gt": 12}})
        >>> fs.exists(foo={"$gt": 12})

        Returns ``True`` if a matching file exists, ``False``
        otherwise. Calls to :meth:`exists` will not automatically
        create appropriate indexes; application developers should be
        sure to create indexes if needed and as appropriate.

        :Parameters:
          - `document_or_id` (optional): query document, or _id of the
            document to check for
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`
          - `**kwargs` (optional): keyword arguments are used as a
            query document, if they're present.

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        if kwargs:
            f = self.__files.find_one(kwargs, ["_id"], session=session)
        else:
            f = self.__files.find_one(document_or_id, ["_id"], session=session)

        return f is not None


class GridFSBucket(object):
    """An instance of GridFS on top of a single Database."""

    def __init__(self, db, bucket_name="fs",
                 chunk_size_bytes=DEFAULT_CHUNK_SIZE, write_concern=None,
                 read_preference=None, disable_md5=False):
        """Create a new instance of :class:`GridFSBucket`.

        Raises :exc:`TypeError` if `database` is not an instance of
        :class:`~pymongo.database.Database`.

        Raises :exc:`~pymongo.errors.ConfigurationError` if `write_concern`
        is not acknowledged.

        :Parameters:
          - `database`: database to use.
          - `bucket_name` (optional): The name of the bucket. Defaults to 'fs'.
          - `chunk_size_bytes` (optional): The chunk size in bytes. Defaults
            to 255KB.
          - `write_concern` (optional): The
            :class:`~pymongo.write_concern.WriteConcern` to use. If ``None``
            (the default) db.write_concern is used.
          - `read_preference` (optional): The read preference to use. If
            ``None`` (the default) db.read_preference is used.
          - `disable_md5` (optional): When True, MD5 checksums will not be
            computed for uploaded files. Useful in environments where MD5
            cannot be used for regulatory or other reasons. Defaults to False.

        .. versionadded:: 3.1

        .. mongodoc:: gridfs
        """
        if not isinstance(db, Database):
            raise TypeError("database must be an instance of Database")

        db = _clear_entity_type_registry(db)

        wtc = write_concern if write_concern is not None else db.write_concern
        if not wtc.acknowledged:
            raise ConfigurationError('write concern must be acknowledged')

        self._db = db
        self._bucket_name = bucket_name
        self._collection = db[bucket_name]
        self._disable_md5 = disable_md5

        self._chunks = self._collection.chunks.with_options(
            write_concern=write_concern,
            read_preference=read_preference)

        self._files = self._collection.files.with_options(
            write_concern=write_concern,
            read_preference=read_preference)

        self._chunk_size_bytes = chunk_size_bytes
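
    # Usage sketch (not part of the original module; hypothetical names,
    # assumes a reachable mongod): a bucket wraps the <bucket_name>.files and
    # <bucket_name>.chunks collections.
    #
    #     from pymongo import MongoClient
    #     from gridfs import GridFSBucket
    #
    #     db = MongoClient().my_database
    #     bucket = GridFSBucket(db, bucket_name="images",
    #                           chunk_size_bytes=1024 * 1024)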

    def open_upload_stream(self, filename, chunk_size_bytes=None,
                           metadata=None, session=None):
        """Opens a Stream that the application can write the contents of the
        file to.

        The user must specify the filename, and can choose to add any
        additional information in the metadata field of the file document or
        modify the chunk size.
        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          grid_in = fs.open_upload_stream(
              "test_file", chunk_size_bytes=4,
              metadata={"contentType": "text/plain"})
          grid_in.write("data I want to store!")
          grid_in.close()  # uploaded on close

        Returns an instance of :class:`~gridfs.grid_file.GridIn`.

        Raises :exc:`~gridfs.errors.NoFile` if no such version of
        that file exists.
        Raises :exc:`~ValueError` if `filename` is not a string.

        :Parameters:
          - `filename`: The name of the file to upload.
          - `chunk_size_bytes` (optional): The number of bytes per chunk of this
            file. Defaults to the chunk_size_bytes in :class:`GridFSBucket`.
          - `metadata` (optional): User data for the 'metadata' field of the
            files collection document. If not provided the metadata field will
            be omitted from the files collection document.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        validate_string("filename", filename)

        opts = {"filename": filename,
                "chunk_size": (chunk_size_bytes if chunk_size_bytes
                               is not None else self._chunk_size_bytes)}
        if metadata is not None:
            opts["metadata"] = metadata

        return GridIn(
            self._collection,
            session=session,
            disable_md5=self._disable_md5,
            **opts)

    def open_upload_stream_with_id(
            self, file_id, filename, chunk_size_bytes=None, metadata=None,
            session=None):
        """Opens a Stream that the application can write the contents of the
        file to.

        The user must specify the file id and filename, and can choose to add
        any additional information in the metadata field of the file document
        or modify the chunk size.
        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          grid_in = fs.open_upload_stream_with_id(
              ObjectId(),
              "test_file",
              chunk_size_bytes=4,
              metadata={"contentType": "text/plain"})
          grid_in.write("data I want to store!")
          grid_in.close()  # uploaded on close

        Returns an instance of :class:`~gridfs.grid_file.GridIn`.

        Raises :exc:`~gridfs.errors.NoFile` if no such version of
        that file exists.
        Raises :exc:`~ValueError` if `filename` is not a string.

        :Parameters:
          - `file_id`: The id to use for this file. The id must not have
            already been used for another file.
          - `filename`: The name of the file to upload.
          - `chunk_size_bytes` (optional): The number of bytes per chunk of this
            file. Defaults to the chunk_size_bytes in :class:`GridFSBucket`.
          - `metadata` (optional): User data for the 'metadata' field of the
            files collection document. If not provided the metadata field will
            be omitted from the files collection document.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        validate_string("filename", filename)

        opts = {"_id": file_id,
                "filename": filename,
                "chunk_size": (chunk_size_bytes if chunk_size_bytes
                               is not None else self._chunk_size_bytes)}
        if metadata is not None:
            opts["metadata"] = metadata

        return GridIn(
            self._collection,
            session=session,
            disable_md5=self._disable_md5,
            **opts)

    def upload_from_stream(self, filename, source, chunk_size_bytes=None,
                           metadata=None, session=None):
        """Uploads a user file to a GridFS bucket.

        Reads the contents of the user file from `source` and uploads
        it to the file `filename`. Source can be a string or file-like object.
        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          file_id = fs.upload_from_stream(
              "test_file",
              "data I want to store!",
              chunk_size_bytes=4,
              metadata={"contentType": "text/plain"})

        Returns the _id of the uploaded file.

        Raises :exc:`~gridfs.errors.NoFile` if no such version of
        that file exists.
        Raises :exc:`~ValueError` if `filename` is not a string.

        :Parameters:
          - `filename`: The name of the file to upload.
          - `source`: The source stream of the content to be uploaded. Must be
            a file-like object that implements :meth:`read` or a string.
          - `chunk_size_bytes` (optional): The number of bytes per chunk of this
            file. Defaults to the chunk_size_bytes of :class:`GridFSBucket`.
          - `metadata` (optional): User data for the 'metadata' field of the
            files collection document. If not provided the metadata field will
            be omitted from the files collection document.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        with self.open_upload_stream(
                filename, chunk_size_bytes, metadata, session=session) as gin:
            gin.write(source)

        return gin._id

    def upload_from_stream_with_id(self, file_id, filename, source,
                                   chunk_size_bytes=None, metadata=None,
                                   session=None):
        """Uploads a user file to a GridFS bucket with a custom file id.

        Reads the contents of the user file from `source` and uploads
        it to the file `filename`. Source can be a string or file-like object.
        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          file_id = fs.upload_from_stream_with_id(
              ObjectId(),
              "test_file",
              "data I want to store!",
              chunk_size_bytes=4,
              metadata={"contentType": "text/plain"})

        Raises :exc:`~gridfs.errors.NoFile` if no such version of
        that file exists.
        Raises :exc:`~ValueError` if `filename` is not a string.

        :Parameters:
          - `file_id`: The id to use for this file. The id must not have
            already been used for another file.
          - `filename`: The name of the file to upload.
          - `source`: The source stream of the content to be uploaded. Must be
            a file-like object that implements :meth:`read` or a string.
          - `chunk_size_bytes` (optional): The number of bytes per chunk of this
            file. Defaults to the chunk_size_bytes of :class:`GridFSBucket`.
          - `metadata` (optional): User data for the 'metadata' field of the
            files collection document. If not provided the metadata field will
            be omitted from the files collection document.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        with self.open_upload_stream_with_id(
                file_id, filename, chunk_size_bytes, metadata,
                session=session) as gin:
            gin.write(source)

    def open_download_stream(self, file_id, session=None):
        """Opens a Stream from which the application can read the contents of
        the stored file specified by file_id.

        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          # get _id of file to read.
          file_id = fs.upload_from_stream("test_file", "data I want to store!")
          grid_out = fs.open_download_stream(file_id)
          contents = grid_out.read()

        Returns an instance of :class:`~gridfs.grid_file.GridOut`.

        Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists.

        :Parameters:
          - `file_id`: The _id of the file to be downloaded.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        gout = GridOut(self._collection, file_id, session=session)

        # Raise NoFile now, instead of on first attribute access.
        gout._ensure_file()
        return gout

    def download_to_stream(self, file_id, destination, session=None):
        """Downloads the contents of the stored file specified by file_id and
        writes the contents to `destination`.

        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          # Get _id of file to read
          file_id = fs.upload_from_stream("test_file", "data I want to store!")
          # Get file to write to
          file = open('myfile', 'wb+')
          fs.download_to_stream(file_id, file)
          file.seek(0)
          contents = file.read()

        Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists.

        :Parameters:
          - `file_id`: The _id of the file to be downloaded.
          - `destination`: a file-like object implementing :meth:`write`.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        with self.open_download_stream(file_id, session=session) as gout:
            for chunk in gout:
                destination.write(chunk)

    def delete(self, file_id, session=None):
        """Given a file_id, delete this stored file's files collection document
        and associated chunks from a GridFS bucket.

        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          # Get _id of file to delete
          file_id = fs.upload_from_stream("test_file", "data I want to store!")
          fs.delete(file_id)

        Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists.

        :Parameters:
          - `file_id`: The _id of the file to be deleted.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        res = self._files.delete_one({"_id": file_id}, session=session)
        self._chunks.delete_many({"files_id": file_id}, session=session)
        if not res.deleted_count:
            raise NoFile(
                "no file could be deleted because none matched %s" % file_id)

    def find(self, *args, **kwargs):
        """Find and return the files collection documents that match ``filter``

        Returns a cursor that iterates across files matching
        arbitrary queries on the files collection. Can be combined
        with other modifiers for additional control.

        For example::

          for grid_data in fs.find({"filename": "lisa.txt"},
                                   no_cursor_timeout=True):
              data = grid_data.read()

        would iterate through all versions of "lisa.txt" stored in GridFS.
        Note that setting no_cursor_timeout to True may be important to
        prevent the cursor from timing out during long multi-file processing
        work.

        As another example, the call::

          most_recent_three = fs.find().sort("uploadDate", -1).limit(3)

        would return a cursor to the three most recently uploaded files
        in GridFS.

        Follows a similar interface to
        :meth:`~pymongo.collection.Collection.find`
        in :class:`~pymongo.collection.Collection`.

        If a :class:`~pymongo.client_session.ClientSession` is passed to
        :meth:`find`, all returned :class:`~gridfs.grid_file.GridOut` instances
        are associated with that session.

        :Parameters:
          - `filter`: Search query.
          - `batch_size` (optional): The number of documents to return per
            batch.
          - `limit` (optional): The maximum number of documents to return.
          - `no_cursor_timeout` (optional): The server normally times out idle
            cursors after an inactivity period (10 minutes) to prevent excess
            memory use. Set this option to True to prevent that.
          - `skip` (optional): The number of documents to skip before
            returning.
          - `sort` (optional): The order by which to sort results. Defaults to
            None.
        """
        return GridOutCursor(self._collection, *args, **kwargs)

    def open_download_stream_by_name(self, filename, revision=-1, session=None):
        """Opens a Stream from which the application can read the contents of
        `filename` and optional `revision`.

        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          grid_out = fs.open_download_stream_by_name("test_file")
          contents = grid_out.read()

        Returns an instance of :class:`~gridfs.grid_file.GridOut`.

        Raises :exc:`~gridfs.errors.NoFile` if no such version of
        that file exists.

        Raises :exc:`~ValueError` if `filename` is not a string.

        :Parameters:
          - `filename`: The name of the file to read from.
          - `revision` (optional): Which revision (documents with the same
            filename and different uploadDate) of the file to retrieve.
            Defaults to -1 (the most recent revision).
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        :Note: Revision numbers are defined as follows:

          - 0 = the original stored file
          - 1 = the first revision
          - 2 = the second revision
          - etc...
          - -2 = the second most recent revision
          - -1 = the most recent revision

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        validate_string("filename", filename)

        query = {"filename": filename}

        cursor = self._files.find(query, session=session)
        if revision < 0:
            skip = abs(revision) - 1
            cursor.limit(-1).skip(skip).sort("uploadDate", DESCENDING)
        else:
            cursor.limit(-1).skip(revision).sort("uploadDate", ASCENDING)
        try:
            grid_file = next(cursor)
            return GridOut(
                self._collection, file_document=grid_file, session=session)
        except StopIteration:
            raise NoFile(
                "no version %d for filename %r" % (revision, filename))
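
    # Note (not part of the original module): revision indexing sketch. After
    # uploading "cfg.json" three times (hypothetical setup), these pairs
    # address the same stored revision:
    #
    #     fs.open_download_stream_by_name("cfg.json", revision=0)    # == -3
    #     fs.open_download_stream_by_name("cfg.json", revision=-1)   # == 2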

    def download_to_stream_by_name(self, filename, destination, revision=-1,
                                   session=None):
        """Write the contents of `filename` (with optional `revision`) to
        `destination`.

        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          # Get file to write to
          file = open('myfile', 'wb')
          fs.download_to_stream_by_name("test_file", file)

        Raises :exc:`~gridfs.errors.NoFile` if no such version of
        that file exists.

        Raises :exc:`~ValueError` if `filename` is not a string.

        :Parameters:
          - `filename`: The name of the file to read from.
          - `destination`: A file-like object that implements :meth:`write`.
          - `revision` (optional): Which revision (documents with the same
            filename and different uploadDate) of the file to retrieve.
            Defaults to -1 (the most recent revision).
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        :Note: Revision numbers are defined as follows:

          - 0 = the original stored file
          - 1 = the first revision
          - 2 = the second revision
          - etc...
          - -2 = the second most recent revision
          - -1 = the most recent revision

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        with self.open_download_stream_by_name(
                filename, revision, session=session) as gout:
            for chunk in gout:
                destination.write(chunk)

    def rename(self, file_id, new_filename, session=None):
        """Renames the stored file with the specified file_id.

        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          # Get _id of file to rename
          file_id = fs.upload_from_stream("test_file", "data I want to store!")
          fs.rename(file_id, "new_test_name")

        Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists.

        :Parameters:
          - `file_id`: The _id of the file to be renamed.
          - `new_filename`: The new name of the file.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        result = self._files.update_one({"_id": file_id},
                                        {"$set": {"filename": new_filename}},
                                        session=session)
        if not result.matched_count:
            # Use %s, not %i: file_id is typically an ObjectId, not an int.
            raise NoFile("no files could be renamed %r because none "
                         "matched file_id %s" % (new_filename, file_id))
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1,33 +0,0 @@
# Copyright 2009-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Exceptions raised by the :mod:`gridfs` package"""

from pymongo.errors import PyMongoError


class GridFSError(PyMongoError):
    """Base class for all GridFS exceptions."""


class CorruptGridFile(GridFSError):
    """Raised when a file in :class:`~gridfs.GridFS` is malformed."""


class NoFile(GridFSError):
    """Raised when trying to read from a non-existent file."""


class FileExists(GridFSError):
    """Raised when trying to create a file that already exists."""
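
# Usage sketch (not part of the original module; hypothetical names): the
# hierarchy lets callers catch everything GridFS-specific via GridFSError,
# or a single case precisely.
#
#     from gridfs.errors import NoFile
#
#     try:
#         data = fs.get(missing_id).read()
#     except NoFile:
#         data = None  # absent file, not a server error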
@ -1,842 +0,0 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tools for representing files stored in GridFS."""
import datetime
import hashlib
import io
import math
import os

from bson.int64 import Int64
from bson.son import SON
from bson.binary import Binary
from bson.objectid import ObjectId
from bson.py3compat import text_type, StringIO
from gridfs.errors import CorruptGridFile, FileExists, NoFile
from pymongo import ASCENDING
from pymongo.collection import Collection
from pymongo.cursor import Cursor
from pymongo.errors import (ConfigurationError,
                            CursorNotFound,
                            DuplicateKeyError,
                            OperationFailure)
from pymongo.read_preferences import ReadPreference

try:
    _SEEK_SET = os.SEEK_SET
    _SEEK_CUR = os.SEEK_CUR
    _SEEK_END = os.SEEK_END
# before 2.5
except AttributeError:
    _SEEK_SET = 0
    _SEEK_CUR = 1
    _SEEK_END = 2

EMPTY = b""
NEWLN = b"\n"

"""Default chunk size, in bytes."""
# Slightly under a power of 2, to work well with server's record allocations.
DEFAULT_CHUNK_SIZE = 255 * 1024

_C_INDEX = SON([("files_id", ASCENDING), ("n", ASCENDING)])
_F_INDEX = SON([("filename", ASCENDING), ("uploadDate", ASCENDING)])


def _grid_in_property(field_name, docstring, read_only=False,
                      closed_only=False):
    """Create a GridIn property."""
    def getter(self):
        if closed_only and not self._closed:
            raise AttributeError("can only get %r on a closed file" %
                                 field_name)
        # Protect against PHP-237
        if field_name == 'length':
            return self._file.get(field_name, 0)
        return self._file.get(field_name, None)

    def setter(self, value):
        if self._closed:
            self._coll.files.update_one({"_id": self._file["_id"]},
                                        {"$set": {field_name: value}})
        self._file[field_name] = value

    if read_only:
        docstring += "\n\nThis attribute is read-only."
    elif closed_only:
        docstring = "%s\n\n%s" % (docstring, "This attribute is read-only and "
                                  "can only be read after :meth:`close` "
                                  "has been called.")

    if not read_only and not closed_only:
        return property(getter, setter, doc=docstring)
    return property(getter, doc=docstring)


def _grid_out_property(field_name, docstring):
    """Create a GridOut property."""
    def getter(self):
        self._ensure_file()

        # Protect against PHP-237
        if field_name == 'length':
            return self._file.get(field_name, 0)
        return self._file.get(field_name, None)

    docstring += "\n\nThis attribute is read-only."
    return property(getter, doc=docstring)


def _clear_entity_type_registry(entity, **kwargs):
    """Clear the given database/collection object's type registry."""
    codecopts = entity.codec_options.with_options(type_registry=None)
    return entity.with_options(codec_options=codecopts, **kwargs)


class GridIn(object):
    """Class to write data to GridFS.
    """
    def __init__(
            self, root_collection, session=None, disable_md5=False, **kwargs):
        """Write a file to GridFS

        Application developers should generally not need to
        instantiate this class directly - instead see the methods
        provided by :class:`~gridfs.GridFS`.

        Raises :class:`TypeError` if `root_collection` is not an
        instance of :class:`~pymongo.collection.Collection`.

        Any of the file level options specified in the `GridFS Spec
        <http://dochub.mongodb.org/core/gridfsspec>`_ may be passed as
        keyword arguments. Any additional keyword arguments will be
        set as additional fields on the file document. Valid keyword
        arguments include:

          - ``"_id"``: unique ID for this file (default:
            :class:`~bson.objectid.ObjectId`) - this ``"_id"`` must
            not have already been used for another file

          - ``"filename"``: human name for the file

          - ``"contentType"`` or ``"content_type"``: valid mime-type
            for the file

          - ``"chunkSize"`` or ``"chunk_size"``: size of each of the
            chunks, in bytes (default: 255 kb)

          - ``"encoding"``: encoding used for this file. In Python 2,
            any :class:`unicode` that is written to the file will be
            converted to a :class:`str`. In Python 3, any :class:`str`
            that is written to the file will be converted to
            :class:`bytes`.

        :Parameters:
          - `root_collection`: root collection to write to
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession` to use for all
            commands
          - `disable_md5` (optional): When True, an MD5 checksum will not be
            computed for the uploaded file. Useful in environments where
            MD5 cannot be used for regulatory or other reasons. Defaults to
            False.
          - `**kwargs` (optional): file level options (see above)

        .. versionchanged:: 3.6
           Added ``session`` parameter.

        .. versionchanged:: 3.0
           `root_collection` must use an acknowledged
           :attr:`~pymongo.collection.Collection.write_concern`
        """
        if not isinstance(root_collection, Collection):
            raise TypeError("root_collection must be an "
                            "instance of Collection")

        if not root_collection.write_concern.acknowledged:
            raise ConfigurationError('root_collection must use '
                                     'acknowledged write_concern')

        # Handle alternative naming
        if "content_type" in kwargs:
            kwargs["contentType"] = kwargs.pop("content_type")
        if "chunk_size" in kwargs:
            kwargs["chunkSize"] = kwargs.pop("chunk_size")

        coll = _clear_entity_type_registry(
            root_collection, read_preference=ReadPreference.PRIMARY)

        if not disable_md5:
            kwargs["md5"] = hashlib.md5()
        # Defaults
        kwargs["_id"] = kwargs.get("_id", ObjectId())
        kwargs["chunkSize"] = kwargs.get("chunkSize", DEFAULT_CHUNK_SIZE)
        object.__setattr__(self, "_session", session)
        object.__setattr__(self, "_coll", coll)
        object.__setattr__(self, "_chunks", coll.chunks)
        object.__setattr__(self, "_file", kwargs)
        object.__setattr__(self, "_buffer", StringIO())
        object.__setattr__(self, "_position", 0)
        object.__setattr__(self, "_chunk_number", 0)
        object.__setattr__(self, "_closed", False)
        object.__setattr__(self, "_ensured_index", False)

    def __create_index(self, collection, index_key, unique):
        doc = collection.find_one(projection={"_id": 1}, session=self._session)
        if doc is None:
            try:
                index_keys = [index_spec['key'] for index_spec in
                              collection.list_indexes(session=self._session)]
            except OperationFailure:
                index_keys = []
            if index_key not in index_keys:
                collection.create_index(
                    index_key.items(), unique=unique, session=self._session)

    def __ensure_indexes(self):
        if not object.__getattribute__(self, "_ensured_index"):
            self.__create_index(self._coll.files, _F_INDEX, False)
            self.__create_index(self._coll.chunks, _C_INDEX, True)
            object.__setattr__(self, "_ensured_index", True)

    def abort(self):
        """Remove all chunks/files that may have been uploaded and close.
        """
        self._coll.chunks.delete_many(
            {"files_id": self._file['_id']}, session=self._session)
        self._coll.files.delete_one(
            {"_id": self._file['_id']}, session=self._session)
        object.__setattr__(self, "_closed", True)

    @property
    def closed(self):
        """Is this file closed?
        """
        return self._closed

    _id = _grid_in_property("_id", "The ``'_id'`` value for this file.",
                            read_only=True)
    filename = _grid_in_property("filename", "Name of this file.")
    name = _grid_in_property("filename", "Alias for `filename`.")
    content_type = _grid_in_property("contentType", "Mime-type for this file.")
    length = _grid_in_property("length", "Length (in bytes) of this file.",
                               closed_only=True)
    chunk_size = _grid_in_property("chunkSize", "Chunk size for this file.",
                                   read_only=True)
    upload_date = _grid_in_property("uploadDate",
                                    "Date that this file was uploaded.",
                                    closed_only=True)
    md5 = _grid_in_property("md5", "MD5 of the contents of this file "
                            "if an md5 sum was created.",
                            closed_only=True)

    def __getattr__(self, name):
        if name in self._file:
            return self._file[name]
        raise AttributeError("GridIn object has no attribute '%s'" % name)

    def __setattr__(self, name, value):
        # For properties of this instance like _buffer, or descriptors set on
        # the class like filename, use regular __setattr__
        if name in self.__dict__ or name in self.__class__.__dict__:
            object.__setattr__(self, name, value)
        else:
            # All other attributes are part of the document in db.fs.files.
            # Store them to be sent to server on close() or if closed, send
            # them now.
            self._file[name] = value
            if self._closed:
                self._coll.files.update_one({"_id": self._file["_id"]},
                                            {"$set": {name: value}})

    def __flush_data(self, data):
        """Flush `data` to a chunk.
        """
        self.__ensure_indexes()
        if 'md5' in self._file:
            self._file['md5'].update(data)

        if not data:
            return
        assert len(data) <= self.chunk_size

        chunk = {"files_id": self._file["_id"],
                 "n": self._chunk_number,
                 "data": Binary(data)}

        try:
            self._chunks.insert_one(chunk, session=self._session)
        except DuplicateKeyError:
            self._raise_file_exists(self._file['_id'])
        self._chunk_number += 1
        self._position += len(data)

    def __flush_buffer(self):
        """Flush the buffer contents out to a chunk.
        """
        self.__flush_data(self._buffer.getvalue())
        self._buffer.close()
        self._buffer = StringIO()

    def __flush(self):
        """Flush the file to the database.
        """
        try:
            self.__flush_buffer()

            if "md5" in self._file:
                self._file["md5"] = self._file["md5"].hexdigest()
            # The GridFS spec says length SHOULD be an Int64.
            self._file["length"] = Int64(self._position)
            self._file["uploadDate"] = datetime.datetime.utcnow()

            return self._coll.files.insert_one(
                self._file, session=self._session)
        except DuplicateKeyError:
            self._raise_file_exists(self._id)

    def _raise_file_exists(self, file_id):
        """Raise a FileExists exception for the given file_id."""
        raise FileExists("file with _id %r already exists" % file_id)

    def close(self):
        """Flush the file and close it.

        A closed file cannot be written any more. Calling
        :meth:`close` more than once is allowed.
        """
        if not self._closed:
            self.__flush()
            object.__setattr__(self, "_closed", True)

    def read(self, size=-1):
        raise io.UnsupportedOperation('read')

    def readable(self):
        return False

    def seekable(self):
        return False

    def write(self, data):
        """Write data to the file. There is no return value.

        `data` can be either a string of bytes or a file-like object
        (implementing :meth:`read`). If the file has an
        :attr:`encoding` attribute, `data` can also be a
        :class:`unicode` (:class:`str` in python 3) instance, which
        will be encoded as :attr:`encoding` before being written.

        Due to buffering, the data may not actually be written to the
        database until the :meth:`close` method is called. Raises
        :class:`ValueError` if this file is already closed. Raises
        :class:`TypeError` if `data` is not an instance of
        :class:`str` (:class:`bytes` in python 3), a file-like object,
        or an instance of :class:`unicode` (:class:`str` in python 3).
        Unicode data is only allowed if the file has an :attr:`encoding`
        attribute.

        :Parameters:
          - `data`: string of bytes or file-like object to be written
            to the file
        """
        if self._closed:
            raise ValueError("cannot write to a closed file")

        try:
            # file-like
            read = data.read
        except AttributeError:
            # string
            if not isinstance(data, (text_type, bytes)):
                raise TypeError("can only write strings or file-like objects")
            if isinstance(data, text_type):
                try:
                    data = data.encode(self.encoding)
                except AttributeError:
                    raise TypeError("must specify an encoding for file in "
                                    "order to write %s" % (text_type.__name__,))
            read = StringIO(data).read

        if self._buffer.tell() > 0:
            # Make sure to flush only when _buffer is complete
            space = self.chunk_size - self._buffer.tell()
            if space:
                try:
                    to_write = read(space)
                except BaseException:
                    self.abort()
                    raise
                self._buffer.write(to_write)
                if len(to_write) < space:
                    return  # EOF or incomplete
            self.__flush_buffer()
        to_write = read(self.chunk_size)
        while to_write and len(to_write) == self.chunk_size:
            self.__flush_data(to_write)
            to_write = read(self.chunk_size)
        self._buffer.write(to_write)
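
    # Note (not part of the original module; hypothetical names): write()
    # buffers up to chunkSize bytes before emitting a chunk, so small writes
    # do not each hit the server. Text-mode writes require an encoding:
    #
    #     grid_in = fs.new_file(filename="notes.txt", encoding="utf-8")
    #     grid_in.write(u"first line\n")   # encoded with the file's encoding
    #     grid_in.write(u"second line\n")  # still buffered client-side
    #     grid_in.close()                  # flushes the final chunk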
|
||||
def writelines(self, sequence):
|
||||
"""Write a sequence of strings to the file.
|
||||
|
||||
Does not add seperators.
|
||||
"""
|
||||
for line in sequence:
|
||||
self.write(line)
|
||||
|
||||
def writeable(self):
|
||||
return True
|
||||
|
||||
def __enter__(self):
|
||||
"""Support for the context manager protocol.
|
||||
"""
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
"""Support for the context manager protocol.
|
||||
|
||||
Close the file and allow exceptions to propagate.
|
||||
"""
|
||||
self.close()
|
||||
|
||||
# propagate exceptions
|
||||
return False
|
||||
|
||||
|
||||
class GridOut(object):
|
||||
"""Class to read data out of GridFS.
|
||||
"""
|
||||
def __init__(self, root_collection, file_id=None, file_document=None,
|
||||
session=None):
|
||||
"""Read a file from GridFS
|
||||
|
||||
Application developers should generally not need to
|
||||
instantiate this class directly - instead see the methods
|
||||
provided by :class:`~gridfs.GridFS`.
|
||||
|
||||
Either `file_id` or `file_document` must be specified,
|
||||
`file_document` will be given priority if present. Raises
|
||||
:class:`TypeError` if `root_collection` is not an instance of
|
||||
:class:`~pymongo.collection.Collection`.
|
||||
|
||||
:Parameters:
|
||||
- `root_collection`: root collection to read from
|
||||
- `file_id` (optional): value of ``"_id"`` for the file to read
|
||||
- `file_document` (optional): file document from
|
||||
`root_collection.files`
|
||||
- `session` (optional): a
|
||||
:class:`~pymongo.client_session.ClientSession` to use for all
|
||||
commands
|
||||
|
||||
.. versionchanged:: 3.8
|
||||
For better performance and to better follow the GridFS spec,
|
||||
:class:`GridOut` now uses a single cursor to read all the chunks in
|
||||
the file.
|
||||
|
||||
.. versionchanged:: 3.6
|
||||
Added ``session`` parameter.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Creating a GridOut does not immediately retrieve the file metadata
|
||||
from the server. Metadata is fetched when first needed.
|
||||
"""
|
||||
if not isinstance(root_collection, Collection):
|
||||
raise TypeError("root_collection must be an "
|
||||
"instance of Collection")
|
||||
|
||||
root_collection = _clear_entity_type_registry(root_collection)
|
||||
|
||||
self.__chunks = root_collection.chunks
|
||||
self.__files = root_collection.files
|
||||
self.__file_id = file_id
|
||||
self.__buffer = EMPTY
|
||||
self.__chunk_iter = None
|
||||
self.__position = 0
|
||||
self._file = file_document
|
||||
self._session = session
|
||||
|
||||
_id = _grid_out_property("_id", "The ``'_id'`` value for this file.")
|
||||
filename = _grid_out_property("filename", "Name of this file.")
|
||||
name = _grid_out_property("filename", "Alias for `filename`.")
|
||||
content_type = _grid_out_property("contentType", "Mime-type for this file.")
|
||||
length = _grid_out_property("length", "Length (in bytes) of this file.")
|
||||
chunk_size = _grid_out_property("chunkSize", "Chunk size for this file.")
|
||||
upload_date = _grid_out_property("uploadDate",
|
||||
"Date that this file was first uploaded.")
|
||||
aliases = _grid_out_property("aliases", "List of aliases for this file.")
|
||||
metadata = _grid_out_property("metadata", "Metadata attached to this file.")
|
||||
md5 = _grid_out_property("md5", "MD5 of the contents of this file "
|
||||
"if an md5 sum was created.")
|
||||
|
||||
def _ensure_file(self):
|
||||
if not self._file:
|
||||
self._file = self.__files.find_one({"_id": self.__file_id},
|
||||
session=self._session)
|
||||
if not self._file:
|
||||
raise NoFile("no file in gridfs collection %r with _id %r" %
|
||||
(self.__files, self.__file_id))
|
||||
|
||||
def __getattr__(self, name):
|
||||
self._ensure_file()
|
||||
if name in self._file:
|
||||
return self._file[name]
|
||||
raise AttributeError("GridOut object has no attribute '%s'" % name)
|
||||
|
||||
def readable(self):
|
||||
return True
|
||||
|
||||
def readchunk(self):
|
||||
"""Reads a chunk at a time. If the current position is within a
|
||||
chunk the remainder of the chunk is returned.
|
||||
"""
|
||||
received = len(self.__buffer)
|
||||
chunk_data = EMPTY
|
||||
chunk_size = int(self.chunk_size)
|
||||
|
||||
if received > 0:
|
||||
chunk_data = self.__buffer
|
||||
elif self.__position < int(self.length):
|
||||
chunk_number = int((received + self.__position) / chunk_size)
|
||||
if self.__chunk_iter is None:
|
||||
self.__chunk_iter = _GridOutChunkIterator(
|
||||
self, self.__chunks, self._session, chunk_number)
|
||||
|
||||
chunk = self.__chunk_iter.next()
|
||||
chunk_data = chunk["data"][self.__position % chunk_size:]
|
||||
|
||||
if not chunk_data:
|
||||
raise CorruptGridFile("truncated chunk")
|
||||
|
||||
self.__position += len(chunk_data)
|
||||
self.__buffer = EMPTY
|
||||
return chunk_data
|
||||
|
||||
def read(self, size=-1):
|
||||
"""Read at most `size` bytes from the file (less if there
|
||||
isn't enough data).
|
||||
|
||||
The bytes are returned as an instance of :class:`str` (:class:`bytes`
|
||||
in python 3). If `size` is negative or omitted all data is read.
|
||||
|
||||
:Parameters:
|
||||
- `size` (optional): the number of bytes to read
|
||||
|
||||
.. versionchanged:: 3.8
|
||||
This method now only checks for extra chunks after reading the
|
||||
entire file. Previously, this method would check for extra chunks
|
||||
on every call.
|
||||
"""
|
||||
self._ensure_file()
|
||||
|
||||
remainder = int(self.length) - self.__position
|
||||
if size < 0 or size > remainder:
|
||||
size = remainder
|
||||
|
||||
if size == 0:
|
||||
return EMPTY
|
||||
|
||||
received = 0
|
||||
data = StringIO()
|
||||
while received < size:
|
||||
chunk_data = self.readchunk()
|
||||
received += len(chunk_data)
|
||||
data.write(chunk_data)
|
||||
|
||||
# Detect extra chunks after reading the entire file.
|
||||
if size == remainder and self.__chunk_iter:
|
||||
try:
|
||||
self.__chunk_iter.next()
|
||||
except StopIteration:
|
||||
pass
|
||||
|
||||
self.__position -= received - size
|
||||
|
||||
# Return 'size' bytes and store the rest.
|
||||
data.seek(size)
|
||||
self.__buffer = data.read()
|
||||
data.seek(0)
|
||||
return data.read(size)
|
||||
|
||||
    def readline(self, size=-1):
        """Read one line or up to `size` bytes from the file.

        :Parameters:
         - `size` (optional): the maximum number of bytes to read
        """
        remainder = int(self.length) - self.__position
        if size < 0 or size > remainder:
            size = remainder

        if size == 0:
            return EMPTY

        received = 0
        data = StringIO()
        while received < size:
            chunk_data = self.readchunk()
            pos = chunk_data.find(NEWLN, 0, size)
            if pos != -1:
                size = received + pos + 1

            received += len(chunk_data)
            data.write(chunk_data)
            if pos != -1:
                break

        self.__position -= received - size

        # Return 'size' bytes and store the rest.
        data.seek(size)
        self.__buffer = data.read()
        data.seek(0)
        return data.read(size)

    def tell(self):
        """Return the current position of this file.
        """
        return self.__position

    def seek(self, pos, whence=_SEEK_SET):
        """Set the current position of this file.

        :Parameters:
         - `pos`: the position (or offset if using relative
           positioning) to seek to
         - `whence` (optional): where to seek
           from. :attr:`os.SEEK_SET` (``0``) for absolute file
           positioning, :attr:`os.SEEK_CUR` (``1``) to seek relative
           to the current position, :attr:`os.SEEK_END` (``2``) to
           seek relative to the file's end.
        """
        if whence == _SEEK_SET:
            new_pos = pos
        elif whence == _SEEK_CUR:
            new_pos = self.__position + pos
        elif whence == _SEEK_END:
            new_pos = int(self.length) + pos
        else:
            raise IOError(22, "Invalid value for `whence`")

        if new_pos < 0:
            raise IOError(22, "Invalid value for `pos` - must be positive")

        # Optimization, continue using the same buffer and chunk iterator.
        if new_pos == self.__position:
            return

        self.__position = new_pos
        self.__buffer = EMPTY
        if self.__chunk_iter:
            self.__chunk_iter.close()
            self.__chunk_iter = None

    def seekable(self):
        return True

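    # --- Illustrative sketch (not part of the original file): the three
    # `whence` modes of seek(). Continues the hypothetical `grid_out` from
    # the read() sketch above.
    #
    #     import os
    #
    #     grid_out.seek(0, os.SEEK_END)   # absolute end-of-file
    #     assert grid_out.tell() == int(grid_out.length)
    #     grid_out.seek(-4, os.SEEK_CUR)  # four bytes back from the current position
    #     tail = grid_out.read()          # seek() dropped the buffer and chunk
    #                                     # cursor, so this refetches the last chunk
    #     grid_out.seek(0)                # back to the start (os.SEEK_SET default)
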
    def __iter__(self):
        """Return an iterator over all of this file's data.

        The iterator will return chunk-sized instances of
        :class:`str` (:class:`bytes` in python 3). This can be
        useful when serving files using a webserver that handles
        such an iterator efficiently.

        .. note::
           This is different from :py:class:`io.IOBase` which iterates over
           *lines* in the file. Use :meth:`GridOut.readline` to read line by
           line instead of chunk by chunk.

        .. versionchanged:: 3.8
           The iterator now raises :class:`CorruptGridFile` when encountering
           any truncated, missing, or extra chunk in a file. The previous
           behavior was to only raise :class:`CorruptGridFile` on a missing
           chunk.
        """
        return GridOutIterator(self, self.__chunks, self._session)

    def close(self):
        """Make GridOut more generically file-like."""
        if self.__chunk_iter:
            self.__chunk_iter.close()
            self.__chunk_iter = None

    def write(self, value):
        raise io.UnsupportedOperation('write')

    def __enter__(self):
        """Makes it possible to use :class:`GridOut` files
        with the context manager protocol.
        """
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Makes it possible to use :class:`GridOut` files
        with the context manager protocol.
        """
        self.close()
        return False

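# --- Illustrative sketch (not part of the original file): chunk-by-chunk
# streaming with the iterator and context-manager protocols defined above,
# e.g. to serve a large file without holding it in memory. `fs`, `file_id`
# and `transmit` are hypothetical names from the earlier sketches.
#
#     with fs.get(file_id) as grid_out:
#         for chunk in grid_out:   # one chunk-sized bytes object per step
#             transmit(chunk)      # e.g. write to a socket or response body
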
class _GridOutChunkIterator(object):
    """Iterates over a file's chunks using a single cursor.

    Raises CorruptGridFile when encountering any truncated, missing, or extra
    chunk in a file.
    """
    def __init__(self, grid_out, chunks, session, next_chunk):
        self._id = grid_out._id
        self._chunk_size = int(grid_out.chunk_size)
        self._length = int(grid_out.length)
        self._chunks = chunks
        self._session = session
        self._next_chunk = next_chunk
        self._num_chunks = math.ceil(float(self._length) / self._chunk_size)
        self._cursor = None

    def expected_chunk_length(self, chunk_n):
        if chunk_n < self._num_chunks - 1:
            return self._chunk_size
        return self._length - (self._chunk_size * (self._num_chunks - 1))

    def __iter__(self):
        return self

    def _create_cursor(self):
        filter = {"files_id": self._id}
        if self._next_chunk > 0:
            filter["n"] = {"$gte": self._next_chunk}
        self._cursor = self._chunks.find(filter, sort=[("n", 1)],
                                         session=self._session)

    def _next_with_retry(self):
        """Return the next chunk and retry once on CursorNotFound.

        We retry on CursorNotFound to maintain backwards compatibility in
        cases where two calls to read occur more than 10 minutes apart (the
        server's default cursor timeout).
        """
        if self._cursor is None:
            self._create_cursor()

        try:
            return self._cursor.next()
        except CursorNotFound:
            self._cursor.close()
            self._create_cursor()
            return self._cursor.next()

    def next(self):
        try:
            chunk = self._next_with_retry()
        except StopIteration:
            if self._next_chunk >= self._num_chunks:
                raise
            raise CorruptGridFile("no chunk #%d" % self._next_chunk)

        if chunk["n"] != self._next_chunk:
            self.close()
            raise CorruptGridFile(
                "Missing chunk: expected chunk #%d but found "
                "chunk with n=%d" % (self._next_chunk, chunk["n"]))

        if chunk["n"] >= self._num_chunks:
            # According to spec, ignore extra chunks if they are empty.
            if len(chunk["data"]):
                self.close()
                raise CorruptGridFile(
                    "Extra chunk found: expected %d chunks but found "
                    "chunk with n=%d" % (self._num_chunks, chunk["n"]))

        expected_length = self.expected_chunk_length(chunk["n"])
        if len(chunk["data"]) != expected_length:
            self.close()
            raise CorruptGridFile(
                "truncated chunk #%d: expected chunk length to be %d but "
                "found chunk with length %d" % (
                    chunk["n"], expected_length, len(chunk["data"])))

        self._next_chunk += 1
        return chunk

    __next__ = next

    def close(self):
        if self._cursor:
            self._cursor.close()
            self._cursor = None

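# --- Illustrative sketch (not part of the original file): the arithmetic
# _GridOutChunkIterator validates against. Every chunk except the last must
# be exactly chunk_size bytes, and there are ceil(length / chunk_size)
# chunks in total.
#
#     import math
#
#     def expected_chunks(length, chunk_size):
#         num_chunks = int(math.ceil(float(length) / chunk_size))
#         last_len = length - chunk_size * (num_chunks - 1)
#         return num_chunks, last_len
#
#     assert expected_chunks(600, 255) == (3, 90)   # 255 + 255 + 90 bytes
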
class GridOutIterator(object):
    def __init__(self, grid_out, chunks, session):
        self.__chunk_iter = _GridOutChunkIterator(grid_out, chunks, session, 0)

    def __iter__(self):
        return self

    def next(self):
        chunk = self.__chunk_iter.next()
        return bytes(chunk["data"])

    __next__ = next

class GridOutCursor(Cursor):
    """A cursor / iterator for returning GridOut objects as the result
    of an arbitrary query against the GridFS files collection.
    """
    def __init__(self, collection, filter=None, skip=0, limit=0,
                 no_cursor_timeout=False, sort=None, batch_size=0,
                 session=None):
        """Create a new cursor, similar to the normal
        :class:`~pymongo.cursor.Cursor`.

        Should not be called directly by application developers - see
        the :class:`~gridfs.GridFS` method :meth:`~gridfs.GridFS.find` instead.

        .. versionadded:: 2.7

        .. mongodoc:: cursors
        """
        collection = _clear_entity_type_registry(collection)

        # Hold on to the base "fs" collection to create GridOut objects later.
        self.__root_collection = collection

        super(GridOutCursor, self).__init__(
            collection.files, filter, skip=skip, limit=limit,
            no_cursor_timeout=no_cursor_timeout, sort=sort,
            batch_size=batch_size, session=session)

    def next(self):
        """Get next GridOut object from cursor.
        """
        # Work around "super is not iterable" issue in Python 3.x
        next_file = super(GridOutCursor, self).next()
        return GridOut(self.__root_collection, file_document=next_file,
                       session=self.session)

    __next__ = next

    def add_option(self, *args, **kwargs):
        raise NotImplementedError("Method does not exist for GridOutCursor")

    def remove_option(self, *args, **kwargs):
        raise NotImplementedError("Method does not exist for GridOutCursor")

    def _clone_base(self, session):
        """Creates an empty GridOutCursor for information to be copied into.
        """
        return GridOutCursor(self.__root_collection, session=session)
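# --- Illustrative sketch (not part of the original file): GridOutCursor is
# what gridfs.GridFS.find() returns; each result is a ready-to-read GridOut.
# The query below reuses the hypothetical `fs` from the earlier sketches.
#
#     for grid_out in fs.find({"filename": "demo.bin"}).sort("uploadDate", -1):
#         data = grid_out.read()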
@ -1 +0,0 @@
pip
@ -1,20 +0,0 @@
Copyright (c) 2008-2019 The pip developers (see AUTHORS.txt file)

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@ -1,84 +0,0 @@
Metadata-Version: 2.1
Name: pip
Version: 20.0.2
Summary: The PyPA recommended tool for installing Python packages.
Home-page: https://pip.pypa.io/
Author: The pip developers
Author-email: pypa-dev@groups.google.com
License: MIT
Project-URL: Documentation, https://pip.pypa.io
Project-URL: Source, https://github.com/pypa/pip
Keywords: distutils easy_install egg setuptools wheel virtualenv
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Software Development :: Build Tools
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*

pip - The Python Package Installer
==================================

.. image:: https://img.shields.io/pypi/v/pip.svg
   :target: https://pypi.org/project/pip/

.. image:: https://readthedocs.org/projects/pip/badge/?version=latest
   :target: https://pip.pypa.io/en/latest

pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.

Please take a look at our documentation for how to install and use pip:

* `Installation`_
* `Usage`_

Updates are released regularly, with a new version every 3 months. More details can be found in our documentation:

* `Release notes`_
* `Release process`_

If you find bugs, need help, or want to talk to the developers please use our mailing lists or chat rooms:

* `Issue tracking`_
* `Discourse channel`_
* `User IRC`_

If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms:

* `GitHub page`_
* `Dev documentation`_
* `Dev mailing list`_
* `Dev IRC`_

Code of Conduct
---------------

Everyone interacting in the pip project's codebases, issue trackers, chat
rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_.

.. _package installer: https://packaging.python.org/guides/tool-recommendations/
.. _Python Package Index: https://pypi.org
.. _Installation: https://pip.pypa.io/en/stable/installing.html
.. _Usage: https://pip.pypa.io/en/stable/
.. _Release notes: https://pip.pypa.io/en/stable/news.html
.. _Release process: https://pip.pypa.io/en/latest/development/release-process/
.. _GitHub page: https://github.com/pypa/pip
.. _Dev documentation: https://pip.pypa.io/en/latest/development
.. _Issue tracking: https://github.com/pypa/pip/issues
.. _Discourse channel: https://discuss.python.org/c/packaging
.. _Dev mailing list: https://groups.google.com/forum/#!forum/pypa-dev
.. _User IRC: https://webchat.freenode.net/?channels=%23pypa
.. _Dev IRC: https://webchat.freenode.net/?channels=%23pypa-dev
.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/
@ -1,752 +0,0 @@
|
||||
pip/__init__.py,sha256=U1AM82iShMaw90K6Yq0Q2-AZ1EsOcqQLQRB-rxwFtII,455
|
||||
pip/__main__.py,sha256=NM95x7KuQr-lwPoTjAC0d_QzLJsJjpmAoxZg0mP8s98,632
|
||||
pip/_internal/__init__.py,sha256=j5fiII6yCeZjpW7_7wAVRMM4DwE-gyARGVU4yAADDeE,517
|
||||
pip/_internal/build_env.py,sha256=--aNgzIdYrCOclHMwoAdpclCpfdFE_jooRuCy5gczwg,7532
|
||||
pip/_internal/cache.py,sha256=16GrnDRLBQNlfKWIuIF6Sa-EFS78kez_w1WEjT3ykTI,11605
|
||||
pip/_internal/configuration.py,sha256=MgKrLFBJBkF3t2VJM4tvlnEspfSuS4scp_LhHWh53nY,14222
|
||||
pip/_internal/exceptions.py,sha256=6YRuwXAK6F1iyUWKIkCIpWWN2khkAn1sZOgrFA9S8Ro,10247
|
||||
pip/_internal/legacy_resolve.py,sha256=L7R72I7CjVgJlPTggmA1j4b-H8NmxNu_dKVhrpGXGps,16277
|
||||
pip/_internal/locations.py,sha256=VifFEqhc7FWFV8QGoEM3CpECRY8Doq7kTytytxsEgx0,6734
|
||||
pip/_internal/main.py,sha256=IVBnUQ-FG7DK6617uEXRB5_QJqspAsBFmTmTesYkbdQ,437
|
||||
pip/_internal/pep425tags.py,sha256=SlIQokevkoKnXhoK3PZvXiDoj8hFKoJ7thDifDtga3k,5490
|
||||
pip/_internal/pyproject.py,sha256=kB966ZCSxiZQRa3W2RXN9as5pRuKW6Elnb4xdqDxASg,7404
|
||||
pip/_internal/self_outdated_check.py,sha256=3KO1pTJUuYaiV9X0t87I9PimkGL82HbhLWbocqKZpBU,8009
|
||||
pip/_internal/wheel_builder.py,sha256=gr9jE14W5ZuYblpldo-tpRuyG0e0AVmHLttImuAvXlE,9441
|
||||
pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132
|
||||
pip/_internal/cli/autocompletion.py,sha256=ekGNtcDI0p7rFVc-7s4T9Tbss4Jgb7vsB649XJIblRg,6547
|
||||
pip/_internal/cli/base_command.py,sha256=v6yl5XNRqye8BT9ep8wvpMu6lylP_Hu6D95r_HqbpbQ,7948
|
||||
pip/_internal/cli/cmdoptions.py,sha256=pppqSTy3R7YLFqSRLdVmwwO6ZIvQ1MmWYHPaXr4wvuI,28115
|
||||
pip/_internal/cli/command_context.py,sha256=ygMVoTy2jpNilKT-6416gFSQpaBtrKRBbVbi2fy__EU,975
|
||||
pip/_internal/cli/main.py,sha256=8iq3bHe5lxJTB2EvKOqZ38NS0MmoS79_S1kgj4QuH8A,2610
|
||||
pip/_internal/cli/main_parser.py,sha256=W9OWeryh7ZkqELohaFh0Ko9sB98ZkSeDmnYbOZ1imBc,2819
|
||||
pip/_internal/cli/parser.py,sha256=O9djTuYQuSfObiY-NU6p4MJCfWsRUnDpE2YGA_fwols,9487
|
||||
pip/_internal/cli/req_command.py,sha256=pAUAglpTn0mUA6lRs7KN71yOm1KDabD0ySVTQTqWTSA,12463
|
||||
pip/_internal/cli/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156
|
||||
pip/_internal/commands/__init__.py,sha256=uTSj58QlrSKeXqCUSdL-eAf_APzx5BHy1ABxb0j5ZNE,3714
|
||||
pip/_internal/commands/check.py,sha256=mgLNYT3bd6Kmynwh4zzcBmVlFZ-urMo40jTgk6U405E,1505
|
||||
pip/_internal/commands/completion.py,sha256=UFQvq0Q4_B96z1bvnQyMOq82aPSu05RejbLmqeTZjC0,2975
|
||||
pip/_internal/commands/configuration.py,sha256=6riioZjMhsNSEct7dE-X8SobGodk3WERKJvuyjBje4Q,7226
|
||||
pip/_internal/commands/debug.py,sha256=a8llax2hRkxgK-tvwdJgaCaZCYPIx0fDvrlMDoYr8bQ,4209
|
||||
pip/_internal/commands/download.py,sha256=zX_0-IeFb4C8dxSmGHxk-6H5kehtyTSsdWpjNpAhSww,5007
|
||||
pip/_internal/commands/freeze.py,sha256=G9I_yoBHlpWLX1qItsWNOmmqc8ET7pekFybdbV333d4,3464
|
||||
pip/_internal/commands/hash.py,sha256=47teimfAPhpkaVbSDaafck51BT3XXYuL83lAqc5lOcE,1735
|
||||
pip/_internal/commands/help.py,sha256=Nhecq--ydFn80Gm1Zvbf9943EcRJfO0TnXUhsF0RO7s,1181
|
||||
pip/_internal/commands/install.py,sha256=WYv_h_pIcO7i-Iur2Y84GfzOEB5UJmQ-xY74ZJwA8dw,26252
|
||||
pip/_internal/commands/list.py,sha256=RqjYu3-Bo_mdh5BYbq1zBU_WTz2C8H8S2Z0_1EG7GXA,10563
|
||||
pip/_internal/commands/search.py,sha256=7Il8nKZ9mM7qF5jlnBoPvSIFY9f-0-5IbYoX3miTuZY,5148
|
||||
pip/_internal/commands/show.py,sha256=Vzsj2oX0JBl94MPyF3LV8YoMcigl8B2UsMM8zp0pH2s,6792
|
||||
pip/_internal/commands/uninstall.py,sha256=8mldFbrQecSoWDZRqxBgJkrlvx6Y9Iy7cs-2BIgtXt4,2983
|
||||
pip/_internal/commands/wheel.py,sha256=TMU5ZhjLo7BIZQApGPsYfoCsbGTnvP-N9jkgPJXhj1Y,7170
|
||||
pip/_internal/distributions/__init__.py,sha256=ECBUW5Gtu9TjJwyFLvim-i6kUMYVuikNh9I5asL6tbA,959
|
||||
pip/_internal/distributions/base.py,sha256=ruprpM_L2T2HNi3KLUHlbHimZ1sWVw-3Q0Lb8O7TDAI,1425
|
||||
pip/_internal/distributions/installed.py,sha256=YqlkBKr6TVP1MAYS6SG8ojud21wVOYLMZ8jMLJe9MSU,760
|
||||
pip/_internal/distributions/sdist.py,sha256=D4XTMlCwgPlK69l62GLYkNSVTVe99fR5iAcVt2EbGok,4086
|
||||
pip/_internal/distributions/wheel.py,sha256=95uD-TfaYoq3KiKBdzk9YMN4RRqJ28LNoSTS2K46gek,1294
|
||||
pip/_internal/index/__init__.py,sha256=vpt-JeTZefh8a-FC22ZeBSXFVbuBcXSGiILhQZJaNpQ,30
|
||||
pip/_internal/index/collector.py,sha256=YS7Ix4oylU7ZbPTPFugh-244GSRqMvdHsGUG6nmz2gE,17892
|
||||
pip/_internal/index/package_finder.py,sha256=2Rg75AOpLj8BN1jyL8EI-Iw-Hv6ibJkrYVARCht3bX8,37542
|
||||
pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63
|
||||
pip/_internal/models/candidate.py,sha256=Y58Bcm6oXUj0iS-yhmerlGo5CQJI2p0Ww9h6hR9zQDw,1150
|
||||
pip/_internal/models/format_control.py,sha256=ICzVjjGwfZYdX-eLLKHjMHLutEJlAGpfj09OG_eMqac,2673
|
||||
pip/_internal/models/index.py,sha256=K59A8-hVhBM20Xkahr4dTwP7OjkJyEqXH11UwHFVgqM,1060
|
||||
pip/_internal/models/link.py,sha256=y0H2ZOk0P6d1lfGUL2Pl09xFgZcRt5HwN2LElMifOpI,6827
|
||||
pip/_internal/models/scheme.py,sha256=vvhBrrno7eVDXcdKHiZWwxhPHf4VG5uSCEkC0QDR2RU,679
|
||||
pip/_internal/models/search_scope.py,sha256=2LXbU4wV8LwqdtXQXNXFYKv-IxiDI_QwSz9ZgbwtAfk,3898
|
||||
pip/_internal/models/selection_prefs.py,sha256=rPeif2KKjhTPXeMoQYffjqh10oWpXhdkxRDaPT1HO8k,1908
|
||||
pip/_internal/models/target_python.py,sha256=c-cFi6zCuo5HYbXNS3rVVpKRaHVh5yQlYEjEW23SidQ,3799
|
||||
pip/_internal/models/wheel.py,sha256=6KLuLKH5b0C5goWQXGSISRaq2UZtkHUEAU1y1Zsrwms,2766
|
||||
pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50
|
||||
pip/_internal/network/auth.py,sha256=K3G1ukKb3PiH8w_UnpXTz8qQsTULO-qdbfOE9zTo1fE,11119
|
||||
pip/_internal/network/cache.py,sha256=51CExcRkXWrgMZ7WsrZ6cmijKfViD5tVgKbBvJHO1IE,2394
|
||||
pip/_internal/network/download.py,sha256=3D9vdJmVwmCUMxzC-TaVI_GvVOpQna3BLEYNPCSx3Fc,6260
|
||||
pip/_internal/network/session.py,sha256=u1IXQfv21R1xv86ulyiB58-be4sYm90eFB0Wp8fVMYw,14702
|
||||
pip/_internal/network/utils.py,sha256=iiixo1OeaQ3niUWiBjg59PN6f1w7vvTww1vFriTD_IU,1959
|
||||
pip/_internal/network/xmlrpc.py,sha256=AL115M3vFJ8xiHVJneb8Hi0ZFeRvdPhblC89w25OG5s,1597
|
||||
pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pip/_internal/operations/check.py,sha256=a6uHG0daoWpmSPCdL7iYJaGQYZ-CRvPvTnCv2PnIIs0,5353
|
||||
pip/_internal/operations/freeze.py,sha256=td4BeRnW10EXFTZrx6VgygO3CrjqD5B9f0BGzjQm-Ew,10180
|
||||
pip/_internal/operations/prepare.py,sha256=ro2teBlbBpkRJhBKraP9CoJgVLpueSk62ziWhRToXww,20942
|
||||
pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pip/_internal/operations/build/metadata.py,sha256=yHMi5gHYXcXyHcvUPWHdO-UyOo3McFWljn_nHfM1O9c,1307
|
||||
pip/_internal/operations/build/metadata_legacy.py,sha256=4n6N7BTysqVmEpITzT2UVClyt0Peij_Im8Qm965IWB4,3957
|
||||
pip/_internal/operations/build/wheel.py,sha256=ntltdNP6D2Tpr4V0agssu6rE0F9LaBpJkYT6zSdhEbw,1469
|
||||
pip/_internal/operations/build/wheel_legacy.py,sha256=DYSxQKutwSZnmNvWkwsl2HzE2XQBxV0i0wTphjtUe90,3349
|
||||
pip/_internal/operations/install/__init__.py,sha256=mX7hyD2GNBO2mFGokDQ30r_GXv7Y_PLdtxcUv144e-s,51
|
||||
pip/_internal/operations/install/editable_legacy.py,sha256=rJ_xs2qtDUjpY2-n6eYlVyZiNoKbOtZXZrYrcnIELt4,1488
|
||||
pip/_internal/operations/install/legacy.py,sha256=eBV8gHbO9sBlBc-4nuR3Sd2nikHgEcnC9khfeLiypio,4566
|
||||
pip/_internal/operations/install/wheel.py,sha256=xdCjH6uIUyg39Pf8tUaMFUN4a7eozJAFMb_wKcgQlsY,23012
|
||||
pip/_internal/req/__init__.py,sha256=UVaYPlHZVGRBQQPjvGC_6jJDQtewXm0ws-8Lxhg_TiY,2671
|
||||
pip/_internal/req/constructors.py,sha256=w5-kWWVCqlSqcIBitw86yq7XGMPpKrHDfQZSE2mJ_xc,14388
|
||||
pip/_internal/req/req_file.py,sha256=ECqRUicCw5Y08R1YynZAAp8dSKQhDXoc1Q-mY3a9b6I,18485
|
||||
pip/_internal/req/req_install.py,sha256=wjsIr4lDpbVSLqANKJI9mXwRVHaRxcnj8q30UiHoLRA,30442
|
||||
pip/_internal/req/req_set.py,sha256=GsrKmupRKhNMhjkofVfCEHEHfgEvYBxClaQH5xLBQHg,8066
|
||||
pip/_internal/req/req_tracker.py,sha256=27fvVG8Y2MJS1KpU2rBMnQyUEMHG4lkHT_bzbzQK-c0,4723
|
||||
pip/_internal/req/req_uninstall.py,sha256=DWnOsuyYGju6-sylyoCm7GtUNevn9qMAVhjAGLcdXUE,23609
|
||||
pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pip/_internal/utils/appdirs.py,sha256=frpKbfJiyKLgpPDYNDrPtkfaZ0akY9SyB7ryPV29sMg,1144
|
||||
pip/_internal/utils/compat.py,sha256=D7FKGLBdQwWH-dHIGaoWMawDZWBYApvtJVL1kFPJ930,8869
|
||||
pip/_internal/utils/deprecation.py,sha256=pBnNogoA4UGTxa_JDnPXBRRYpKMbExAhXpBwAwklOBs,3318
|
||||
pip/_internal/utils/distutils_args.py,sha256=a56mblNxk9BGifbpEETG61mmBrqhjtjRkJ4HYn-oOEE,1350
|
||||
pip/_internal/utils/encoding.py,sha256=hxZz0t3Whw3d4MHQEiofxalTlfKwxFdLc8fpeGfhKo8,1320
|
||||
pip/_internal/utils/entrypoints.py,sha256=vHcNpnksCv6mllihU6hfifdsKPEjwcaJ1aLIXEaynaU,1152
|
||||
pip/_internal/utils/filesystem.py,sha256=PXa3vMcz4mbEKtkD0joFI8pBwddLQxhfPFOkVH5xjfE,5255
|
||||
pip/_internal/utils/filetypes.py,sha256=R2FwzoeX7b-rZALOXx5cuO8VPPMhUQ4ne7wm3n3IcWA,571
|
||||
pip/_internal/utils/glibc.py,sha256=LOeNGgawCKS-4ke9fii78fwXD73dtNav3uxz1Bf-Ab8,3297
|
||||
pip/_internal/utils/hashes.py,sha256=my-wSnAWEDvl_8rQaOQcVIWjwh1-f_QiEvGy9TPf53U,3942
|
||||
pip/_internal/utils/inject_securetransport.py,sha256=M17ZlFVY66ApgeASVjKKLKNz0LAfk-SyU0HZ4ZB6MmI,810
|
||||
pip/_internal/utils/logging.py,sha256=aJL7NldPhS5KGFof6Qt3o3MG5cjm5TOoo7bGRu9_wsg,13033
|
||||
pip/_internal/utils/marker_files.py,sha256=CO5djQlrPIozJpJybViH_insoAaBGY1aqEt6-cC-iW0,741
|
||||
pip/_internal/utils/misc.py,sha256=itSJCAJfjGJiUaQyooUPxqjjy21M71GXsVn4wFEIMJA,25334
|
||||
pip/_internal/utils/models.py,sha256=IA0hw_T4awQzui0kqfIEASm5yLtgZAB08ag59Nip5G8,1148
|
||||
pip/_internal/utils/packaging.py,sha256=VtiwcAAL7LBi7tGL2je7LeW4bE11KMHGCsJ1NZY5XtM,3035
|
||||
pip/_internal/utils/pkg_resources.py,sha256=ZX-k7V5q_aNWyDse92nN7orN1aCpRLsaxzpkBZ1XKzU,1254
|
||||
pip/_internal/utils/setuptools_build.py,sha256=DouaVolV9olDDFIIN9IszaL-FHdNaZt10ufOZFH9ZAU,5070
|
||||
pip/_internal/utils/subprocess.py,sha256=Ph3x5eHQBxFotyGhpZN8asSMBud-BBkmgaNfARG-di8,9922
|
||||
pip/_internal/utils/temp_dir.py,sha256=87Ib8aNic_hoSDEmUYJHTQIn5-prL2AYL5u_yZ3s4sI,7768
|
||||
pip/_internal/utils/typing.py,sha256=xkYwOeHlf4zsHXBDC4310HtEqwhQcYXFPq2h35Tcrl0,1401
|
||||
pip/_internal/utils/ui.py,sha256=0FNxXlGtbpPtTviv2oXS9t8bQG_NBdfUgP4GbubhS9U,13911
|
||||
pip/_internal/utils/unpacking.py,sha256=M944JTSiapBOSKLWu7lbawpVHSE7flfzZTEr3TAG7v8,9438
|
||||
pip/_internal/utils/urls.py,sha256=aNV9wq5ClUmrz6sG-al7hEWJ4ToitOy7l82CmFGFNW8,1481
|
||||
pip/_internal/utils/virtualenv.py,sha256=Q3S1WPlI7JWpGOT2jUVJ8l2chm_k7VPJ9cHA_cUluEU,3396
|
||||
pip/_internal/utils/wheel.py,sha256=grTRwZtMQwApwbbSPmRVLtac6FKy6SVKeCXNkWyyePA,7302
|
||||
pip/_internal/vcs/__init__.py,sha256=viJxJRqRE_mVScum85bgQIXAd6o0ozFt18VpC-qIJrM,617
|
||||
pip/_internal/vcs/bazaar.py,sha256=84q1-kj1_nJ9AMzMu8RmMp-riRZu81M7K9kowcYgi3U,3957
|
||||
pip/_internal/vcs/git.py,sha256=X0j5jv_x3ZnM_NP09B1ZDxW-PAmfHzqOqX7Wf5XW--0,14058
|
||||
pip/_internal/vcs/mercurial.py,sha256=2mg7BdYI_Fe00fF6omaNccFQLPHBsDBG5CAEzvqn5sA,5110
|
||||
pip/_internal/vcs/subversion.py,sha256=Fpwy71AmuqXnoKi6h1SrXRtPjEMn8fieuM1O4j01IBg,12292
|
||||
pip/_internal/vcs/versioncontrol.py,sha256=nqoaM1_rzx24WnHtihXA8RcPpnUae0sV2sR_LS_5HFA,22600
|
||||
pip/_vendor/__init__.py,sha256=gEJYEfJm7XGLslyjW3KBQyQxyTYxdvTEkRT5Bz28MDs,4657
|
||||
pip/_vendor/appdirs.py,sha256=kVvdzRSQW6hsZYPMrct3jav-CGIZ4horsK7KnUTjt7w,26130
|
||||
pip/_vendor/contextlib2.py,sha256=5HjGflUzwWAUfcILhSmC2GqvoYdZZzFzVfIDztHigUs,16915
|
||||
pip/_vendor/distro.py,sha256=X2So5kjrRKyMbQJ90Xgy93HU5eFtujCzKaYNeoy1k1c,43251
|
||||
pip/_vendor/ipaddress.py,sha256=-0RmurI31XgAaN20WCi0zrcuoat90nNA70_6yGlx2PU,79875
|
||||
pip/_vendor/pyparsing.py,sha256=_9UdBKfzmMgIpLtacjV7T1HDfnXomNxlzNEJSCZfyMA,272429
|
||||
pip/_vendor/retrying.py,sha256=k3fflf5_Mm0XcIJYhB7Tj34bqCCPhUDkYbx1NvW2FPE,9972
|
||||
pip/_vendor/six.py,sha256=Q6WvEXZ1DGEASAo3CGNCJkKv2tPy8xkSmK-VHE9PYIA,34074
|
||||
pip/_vendor/cachecontrol/__init__.py,sha256=pJtAaUxOsMPnytI1A3juAJkXYDr8krdSnsg4Yg3OBEg,302
|
||||
pip/_vendor/cachecontrol/_cmd.py,sha256=URGE0KrA87QekCG3SGPatlSPT571dZTDjNa-ZXX3pDc,1295
|
||||
pip/_vendor/cachecontrol/adapter.py,sha256=sSwaSYd93IIfCFU4tOMgSo6b2LCt_gBSaQUj8ktJFOA,4882
|
||||
pip/_vendor/cachecontrol/cache.py,sha256=1fc4wJP8HYt1ycnJXeEw5pCpeBL2Cqxx6g9Fb0AYDWQ,805
|
||||
pip/_vendor/cachecontrol/compat.py,sha256=kHNvMRdt6s_Xwqq_9qJmr9ou3wYMOMUMxPPcwNxT8Mc,695
|
||||
pip/_vendor/cachecontrol/controller.py,sha256=CWEX3pedIM9s60suf4zZPtm_JvVgnvogMGK_OiBG5F8,14149
|
||||
pip/_vendor/cachecontrol/filewrapper.py,sha256=vACKO8Llzu_ZWyjV1Fxn1MA4TGU60N5N3GSrAFdAY2Q,2533
|
||||
pip/_vendor/cachecontrol/heuristics.py,sha256=BFGHJ3yQcxvZizfo90LLZ04T_Z5XSCXvFotrp7Us0sc,4070
|
||||
pip/_vendor/cachecontrol/serialize.py,sha256=vIa4jvq4x_KSOLdEIedoknX2aXYHQujLDFV4-F21Dno,7091
|
||||
pip/_vendor/cachecontrol/wrapper.py,sha256=5LX0uJwkNQUtYSEw3aGmGu9WY8wGipd81mJ8lG0d0M4,690
|
||||
pip/_vendor/cachecontrol/caches/__init__.py,sha256=-gHNKYvaeD0kOk5M74eOrsSgIKUtC6i6GfbmugGweEo,86
|
||||
pip/_vendor/cachecontrol/caches/file_cache.py,sha256=nYVKsJtXh6gJXvdn1iWyrhxvkwpQrK-eKoMRzuiwkKk,4153
|
||||
pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=HxelMpNCo-dYr2fiJDwM3hhhRmxUYtB5tXm1GpAAT4Y,856
|
||||
pip/_vendor/certifi/__init__.py,sha256=JVwzDhkMttyVVtfNDrU_i0v2a-WmtEBXq0Z8oz4Ghzk,52
|
||||
pip/_vendor/certifi/__main__.py,sha256=NaCn6WtWME-zzVWQ2j4zFyl8cY4knDa9CwtHNIeFPhM,53
|
||||
pip/_vendor/certifi/cacert.pem,sha256=cyvv5Jx1gHACNEj2GaOrsIj0Tk8FmSvHR42uhzvlatg,281457
|
||||
pip/_vendor/certifi/core.py,sha256=EuFc2BsToG5O1-qsx4BSjQ1r1-7WRtH87b1WflZOWhI,218
|
||||
pip/_vendor/chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559
|
||||
pip/_vendor/chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254
|
||||
pip/_vendor/chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757
|
||||
pip/_vendor/chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411
|
||||
pip/_vendor/chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787
|
||||
pip/_vendor/chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110
|
||||
pip/_vendor/chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590
|
||||
pip/_vendor/chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134
|
||||
pip/_vendor/chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855
|
||||
pip/_vendor/chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661
|
||||
pip/_vendor/chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950
|
||||
pip/_vendor/chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510
|
||||
pip/_vendor/chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749
|
||||
pip/_vendor/chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546
|
||||
pip/_vendor/chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748
|
||||
pip/_vendor/chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621
|
||||
pip/_vendor/chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747
|
||||
pip/_vendor/chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715
|
||||
pip/_vendor/chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754
|
||||
pip/_vendor/chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838
|
||||
pip/_vendor/chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777
|
||||
pip/_vendor/chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643
|
||||
pip/_vendor/chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839
|
||||
pip/_vendor/chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948
|
||||
pip/_vendor/chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688
|
||||
pip/_vendor/chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345
|
||||
pip/_vendor/chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592
|
||||
pip/_vendor/chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290
|
||||
pip/_vendor/chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102
|
||||
pip/_vendor/chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370
|
||||
pip/_vendor/chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413
|
||||
pip/_vendor/chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012
|
||||
pip/_vendor/chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481
|
||||
pip/_vendor/chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657
|
||||
pip/_vendor/chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546
|
||||
pip/_vendor/chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774
|
||||
pip/_vendor/chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485
|
||||
pip/_vendor/chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766
|
||||
pip/_vendor/chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242
|
||||
pip/_vendor/chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
|
||||
pip/_vendor/chardet/cli/chardetect.py,sha256=DI8dlV3FBD0c0XA_y3sQ78z754DUv1J8n34RtDjOXNw,2774
|
||||
pip/_vendor/colorama/__init__.py,sha256=DqjXH9URVP3IJwmMt7peYw50ns1RNAymIB9-XdPEFV8,239
|
||||
pip/_vendor/colorama/ansi.py,sha256=Fi0un-QLqRm-v7o_nKiOqyC8PapBJK7DLV_q9LKtTO0,2524
|
||||
pip/_vendor/colorama/ansitowin32.py,sha256=u8QaqdqS_xYSfNkPM1eRJLHz6JMWPodaJaP0mxgHCDc,10462
|
||||
pip/_vendor/colorama/initialise.py,sha256=PprovDNxMTrvoNHFcL2NZjpH2XzDc8BLxLxiErfUl4k,1915
|
||||
pip/_vendor/colorama/win32.py,sha256=bJ8Il9jwaBN5BJ8bmN6FoYZ1QYuMKv2j8fGrXh7TJjw,5404
|
||||
pip/_vendor/colorama/winterm.py,sha256=2y_2b7Zsv34feAsP67mLOVc-Bgq51mdYGo571VprlrM,6438
|
||||
pip/_vendor/distlib/__init__.py,sha256=gzl1hjUXmDGrqRyU7ZLjBwJGAcMimQbrZ22XPVaKaRE,581
|
||||
pip/_vendor/distlib/compat.py,sha256=xdNZmqFN5HwF30HjRn5M415pcC2kgXRBXn767xS8v-M,41404
|
||||
pip/_vendor/distlib/database.py,sha256=fhNzEDtb4HXrpxKyQvhVzDXcOiJlzrOM--UYnvCeZrI,51045
|
||||
pip/_vendor/distlib/index.py,sha256=SXKzpQCERctxYDMp_OLee2f0J0e19ZhGdCIoMlUfUQM,21066
|
||||
pip/_vendor/distlib/locators.py,sha256=c9E4cDEacJ_uKbuE5BqAVocoWp6rsuBGTkiNDQq3zV4,52100
|
||||
pip/_vendor/distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811
|
||||
pip/_vendor/distlib/markers.py,sha256=6Ac3cCfFBERexiESWIOXmg-apIP8l2esafNSX3KMy-8,4387
|
||||
pip/_vendor/distlib/metadata.py,sha256=OhbCKmf5lswE8unWBopI1hj7tRpHp4ZbFvU4d6aAEMM,40234
|
||||
pip/_vendor/distlib/resources.py,sha256=2FGv0ZHF14KXjLIlL0R991lyQQGcewOS4mJ-5n-JVnc,10766
|
||||
pip/_vendor/distlib/scripts.py,sha256=OAkEwxRvIzX-VSfhEttQEKJFVLA47gbW0OgQXJRs7OQ,16998
|
||||
pip/_vendor/distlib/t32.exe,sha256=NS3xBCVAld35JVFNmb-1QRyVtThukMrwZVeXn4LhaEQ,96768
|
||||
pip/_vendor/distlib/t64.exe,sha256=oAqHes78rUWVM0OtVqIhUvequl_PKhAhXYQWnUf7zR0,105984
|
||||
pip/_vendor/distlib/util.py,sha256=f2jZCPrcLCt6LcnC0gUy-Fur60tXD8reA7k4rDpHMDw,59845
|
||||
pip/_vendor/distlib/version.py,sha256=_n7F6juvQGAcn769E_SHa7fOcf5ERlEVymJ_EjPRwGw,23391
|
||||
pip/_vendor/distlib/w32.exe,sha256=lJtnZdeUxTZWya_EW5DZos_K5rswRECGspIl8ZJCIXs,90112
|
||||
pip/_vendor/distlib/w64.exe,sha256=0aRzoN2BO9NWW4ENy4_4vHkHR4qZTFZNVSAJJYlODTI,99840
|
||||
pip/_vendor/distlib/wheel.py,sha256=bRtR5bNR_u_DwkwktN1bgZuwLVOJT1p_vNIUPyN8kJc,40452
|
||||
pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274
|
||||
pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971
|
||||
pip/_vendor/distlib/_backport/shutil.py,sha256=VW1t3uYqUjWZH7jV-6QiimLhnldoV5uIpH4EuiT1jfw,25647
|
||||
pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617
|
||||
pip/_vendor/distlib/_backport/sysconfig.py,sha256=BQHFlb6pubCl_dvT1NjtzIthylofjKisox239stDg0U,26854
|
||||
pip/_vendor/distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628
|
||||
pip/_vendor/html5lib/__init__.py,sha256=Ztrn7UvF-wIFAgRBBa0ML-Gu5AffH3BPX_INJx4SaBI,1162
|
||||
pip/_vendor/html5lib/_ihatexml.py,sha256=3LBtJMlzgwM8vpQiU1TvGmEEmNH72sV0yD8yS53y07A,16705
|
||||
pip/_vendor/html5lib/_inputstream.py,sha256=bPUWcAfJScK4xkjQQaG_HsI2BvEVbFvI0AsodDYPQj0,32552
|
||||
pip/_vendor/html5lib/_tokenizer.py,sha256=YAaOEBD6qc5ISq9Xt9Nif1OFgcybTTfMdwqBkZhpAq4,76580
|
||||
pip/_vendor/html5lib/_utils.py,sha256=ismpASeqa2jqEPQjHUj8vReAf7yIoKnvLN5fuOw6nv0,4015
|
||||
pip/_vendor/html5lib/constants.py,sha256=4lmZWLtEPRLnl8NzftOoYTJdo6jpeMtP6dqQC0g_bWQ,83518
|
||||
pip/_vendor/html5lib/html5parser.py,sha256=g5g2ezkusHxhi7b23vK_-d6K6BfIJRbqIQmvQ9z4EgI,118963
|
||||
pip/_vendor/html5lib/serializer.py,sha256=yfcfBHse2wDs6ojxn-kieJjLT5s1ipilQJ0gL3-rJis,15758
|
||||
pip/_vendor/html5lib/_trie/__init__.py,sha256=8VR1bcgD2OpeS2XExpu5yBhP_Q1K-lwKbBKICBPf1kU,289
|
||||
pip/_vendor/html5lib/_trie/_base.py,sha256=CaybYyMro8uERQYjby2tTeSUatnWDfWroUN9N7ety5w,1013
|
||||
pip/_vendor/html5lib/_trie/datrie.py,sha256=EQpqSfkZRuTbE-DuhW7xMdVDxdZNZ0CfmnYfHA_3zxM,1178
|
||||
pip/_vendor/html5lib/_trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775
|
||||
pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=lViZc2JMCclXi_5gduvmdzrRxtO5Xo9ONnbHBVCsykU,919
|
||||
pip/_vendor/html5lib/filters/base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286
|
||||
pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=egDXUEHXmAG9504xz0K6ALDgYkvUrC2q15YUVeNlVQg,2945
|
||||
pip/_vendor/html5lib/filters/lint.py,sha256=jk6q56xY0ojiYfvpdP-OZSm9eTqcAdRqhCoPItemPYA,3643
|
||||
pip/_vendor/html5lib/filters/optionaltags.py,sha256=8lWT75J0aBOHmPgfmqTHSfPpPMp01T84NKu0CRedxcE,10588
|
||||
pip/_vendor/html5lib/filters/sanitizer.py,sha256=4ON02KNjuqda1lCw5_JCUZxb0BzWR5M7ON84dtJ7dm0,26248
|
||||
pip/_vendor/html5lib/filters/whitespace.py,sha256=8eWqZxd4UC4zlFGW6iyY6f-2uuT8pOCSALc3IZt7_t4,1214
|
||||
pip/_vendor/html5lib/treeadapters/__init__.py,sha256=A0rY5gXIe4bJOiSGRO_j_tFhngRBO8QZPzPtPw5dFzo,679
|
||||
pip/_vendor/html5lib/treeadapters/genshi.py,sha256=CH27pAsDKmu4ZGkAUrwty7u0KauGLCZRLPMzaO3M5vo,1715
|
||||
pip/_vendor/html5lib/treeadapters/sax.py,sha256=BKS8woQTnKiqeffHsxChUqL4q2ZR_wb5fc9MJ3zQC8s,1776
|
||||
pip/_vendor/html5lib/treebuilders/__init__.py,sha256=AysSJyvPfikCMMsTVvaxwkgDieELD5dfR8FJIAuq7hY,3592
|
||||
pip/_vendor/html5lib/treebuilders/base.py,sha256=wQGp5yy22TNG8tJ6aREe4UUeTR7A99dEz0BXVaedWb4,14579
|
||||
pip/_vendor/html5lib/treebuilders/dom.py,sha256=22whb0C71zXIsai5mamg6qzBEiigcBIvaDy4Asw3at0,8925
|
||||
pip/_vendor/html5lib/treebuilders/etree.py,sha256=aqIBOGj_dFYqBURIcTegGNBhAIJOw5iFDHb4jrkYH-8,12764
|
||||
pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=9V0dXxbJYYq-Skgb5-_OL2NkVYpjioEb4CHajo0e9yI,14122
|
||||
pip/_vendor/html5lib/treewalkers/__init__.py,sha256=yhXxHpjlSqfQyUag3v8-vWjMPriFBU8YRAPNpDgBTn8,5714
|
||||
pip/_vendor/html5lib/treewalkers/base.py,sha256=ouiOsuSzvI0KgzdWP8PlxIaSNs9falhbiinAEc_UIJY,7476
|
||||
pip/_vendor/html5lib/treewalkers/dom.py,sha256=EHyFR8D8lYNnyDU9lx_IKigVJRyecUGua0mOi7HBukc,1413
|
||||
pip/_vendor/html5lib/treewalkers/etree.py,sha256=sz1o6mmE93NQ53qJFDO7HKyDtuwgK-Ay3qSFZPC6u00,4550
|
||||
pip/_vendor/html5lib/treewalkers/etree_lxml.py,sha256=sY6wfRshWTllu6n48TPWpKsQRPp-0CQrT0hj_AdzHSU,6309
|
||||
pip/_vendor/html5lib/treewalkers/genshi.py,sha256=4D2PECZ5n3ZN3qu3jMl9yY7B81jnQApBQSVlfaIuYbA,2309
|
||||
pip/_vendor/idna/__init__.py,sha256=9Nt7xpyet3DmOrPUGooDdAwmHZZu1qUAy2EaJ93kGiQ,58
|
||||
pip/_vendor/idna/codec.py,sha256=lvYb7yu7PhAqFaAIAdWcwgaWI2UmgseUua-1c0AsG0A,3299
|
||||
pip/_vendor/idna/compat.py,sha256=R-h29D-6mrnJzbXxymrWUW7iZUvy-26TQwZ0ij57i4U,232
|
||||
pip/_vendor/idna/core.py,sha256=JDCZZ_PLESqIgEbU8mPyoEufWwoOiIqygA17-QZIe3s,11733
|
||||
pip/_vendor/idna/idnadata.py,sha256=HXaPFw6_YAJ0qppACPu0YLAULtRs3QovRM_CCZHGdY0,40899
|
||||
pip/_vendor/idna/intranges.py,sha256=TY1lpxZIQWEP6tNqjZkFA5hgoMWOj1OBmnUG8ihT87E,1749
|
||||
pip/_vendor/idna/package_data.py,sha256=kIzeKKXEouXLR4srqwf9Q3zv-NffKSOz5aSDOJARPB0,21
|
||||
pip/_vendor/idna/uts46data.py,sha256=oLyNZ1pBaiBlj9zFzLFRd_P7J8MkRcgDisjExZR_4MY,198292
|
||||
pip/_vendor/msgpack/__init__.py,sha256=LnKzG5v0RyZgs7KlY2-SZYDBn-toylovXxKiXR6C-IQ,1535
|
||||
pip/_vendor/msgpack/_version.py,sha256=72BxB5FMl1q3Nz1hteHINzHhrFpXQ9nNtULaK52NLk8,20
|
||||
pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081
|
||||
pip/_vendor/msgpack/fallback.py,sha256=vXo6S67Dmil9mz0PRBCLDu6znpv6CGKt9WPCEsdZx2A,37454
|
||||
pip/_vendor/packaging/__about__.py,sha256=G5P2sPs0QxgVqD0zzcSjVLfY31ni6HVUh9ZWlHSAG3M,744
|
||||
pip/_vendor/packaging/__init__.py,sha256=6enbp5XgRfjBjsI9-bn00HjHf5TH21PDMOKkJW8xw-w,562
|
||||
pip/_vendor/packaging/_compat.py,sha256=Z-PwchK0cREbaRGF5MZP8LEv8JkC-qydn2FRrtjeixk,1138
|
||||
pip/_vendor/packaging/_structures.py,sha256=ozkCX8Q8f2qE1Eic3YiQ4buDVfgz2iYevY9e7R2y3iY,2022
|
||||
pip/_vendor/packaging/_typing.py,sha256=-cq_iNeveAWCVoseVvqmknWLbvZ_i9g7BeZBo0ShtHg,1449
|
||||
pip/_vendor/packaging/markers.py,sha256=yap5bk3c8QyPuGtiVbQSYhN70bxWj1nLDv2ZuaCLq7g,9501
|
||||
pip/_vendor/packaging/requirements.py,sha256=G43p2ylM_REg87RLG9JybjbdwfaPyzaKYRtllRfNdrM,4913
|
||||
pip/_vendor/packaging/specifiers.py,sha256=Nz8bnFp53cQInmRGZy50QXlIi2tkDXMfRuGyGps2IRE,31314
|
||||
pip/_vendor/packaging/tags.py,sha256=lJ_91F0icMlFvMp7EiKWPSzgJclNsEYdjdErhryfGj4,23510
|
||||
pip/_vendor/packaging/utils.py,sha256=v5Wk8B7gUL13Rzed6NNhCZlutPQT7jNV-7hr-WOtacU,1700
|
||||
pip/_vendor/packaging/version.py,sha256=qRdNN0_XuPFOJ3fut8ehzxJrNYtBzqF8ZtagEvgNUUM,15480
|
||||
pip/_vendor/pep517/__init__.py,sha256=nCw8ZdLH4c19g8xP_Ndag1KPdQhlSDKaL9pg-X7uNWU,84
|
||||
pip/_vendor/pep517/_in_process.py,sha256=v1Viek27-MGCOFu8eSlLd2jGCrIqc1fISnutGFoRDps,7792
|
||||
pip/_vendor/pep517/build.py,sha256=WqM0-X4KyzY566qxGf3FeaYc1hw95H7YP0ElZ1zuTb0,3318
|
||||
pip/_vendor/pep517/check.py,sha256=ST02kRWBrRMOxgnRm9clw18Q2X7sJGaD4j3h6GmBhJ8,5949
|
||||
pip/_vendor/pep517/colorlog.py,sha256=Tk9AuYm_cLF3BKTBoSTJt9bRryn0aFojIQOwbfVUTxQ,4098
|
||||
pip/_vendor/pep517/compat.py,sha256=M-5s4VNp8rjyT76ZZ_ibnPD44DYVzSQlyCEHayjtDPw,780
|
||||
pip/_vendor/pep517/dirtools.py,sha256=2mkAkAL0mRz_elYFjRKuekTJVipH1zTn4tbf1EDev84,1129
|
||||
pip/_vendor/pep517/envbuild.py,sha256=K4dIGAbkXf3RoQX_9RFpZvMvPrVSHtcbH7o9VSrNnlM,6024
|
||||
pip/_vendor/pep517/meta.py,sha256=8mnM5lDnT4zXQpBTliJbRGfesH7iioHwozbDxALPS9Y,2463
|
||||
pip/_vendor/pep517/wrappers.py,sha256=QiQaEQlfCrhRpPBFQiGVM9QjrKSlj8AvM39haoyfPRk,10599
|
||||
pip/_vendor/pkg_resources/__init__.py,sha256=XpGBfvS9fafA6bm5rx7vnxdxs7yqyoc_NnpzKApkJ64,108277
|
||||
pip/_vendor/pkg_resources/py31compat.py,sha256=CRk8fkiPRDLsbi5pZcKsHI__Pbmh_94L8mr9Qy9Ab2U,562
|
||||
pip/_vendor/progress/__init__.py,sha256=fcbQQXo5np2CoQyhSH5XprkicwLZNLePR3uIahznSO0,4857
|
||||
pip/_vendor/progress/bar.py,sha256=QuDuVNcmXgpxtNtxO0Fq72xKigxABaVmxYGBw4J3Z_E,2854
|
||||
pip/_vendor/progress/counter.py,sha256=MznyBrvPWrOlGe4MZAlGUb9q3aODe6_aNYeAE_VNoYA,1372
|
||||
pip/_vendor/progress/spinner.py,sha256=k8JbDW94T0-WXuXfxZIFhdoNPYp3jfnpXqBnfRv5fGs,1380
|
||||
pip/_vendor/pytoml/__init__.py,sha256=W_SKx36Hsew-Fty36BOpreLm4uF4V_Tgkm_z9rIoOE8,127
|
||||
pip/_vendor/pytoml/core.py,sha256=9CrLLTs1PdWjEwRnYzt_i4dhHcZvGxs_GsMlYAX3iY4,509
|
||||
pip/_vendor/pytoml/parser.py,sha256=qsc0NRnTgdFZgRp9gmr6D_KWFelrwxLkTj9dVxUcqS8,10309
|
||||
pip/_vendor/pytoml/test.py,sha256=2nQs4aX3XQEaaQCx6x_OJTS2Hb0_IiTZRqNOeDmLCzo,1021
|
||||
pip/_vendor/pytoml/utils.py,sha256=JCLHx77Hu1R3F-bRgiROIiKyCzLwyebnp5P35cRJxWs,1665
|
||||
pip/_vendor/pytoml/writer.py,sha256=4QQky9JSuRv60uzuhVZASU8T3CuobSkLG1285X6bDW8,3369
|
||||
pip/_vendor/requests/__init__.py,sha256=ONVsH6kJuPTV9nf-XVoubWsVX3qVtjCyju42kTW6Uug,4074
|
||||
pip/_vendor/requests/__version__.py,sha256=Bm-GFstQaFezsFlnmEMrJDe8JNROz9n2XXYtODdvjjc,436
|
||||
pip/_vendor/requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096
|
||||
pip/_vendor/requests/adapters.py,sha256=e-bmKEApNVqFdylxuMJJfiaHdlmS_zhWhIMEzlHvGuc,21548
|
||||
pip/_vendor/requests/api.py,sha256=fbUo11QoLOoNgWU6FfvNz8vMj9bE_cMmICXBa7TZHJs,6271
|
||||
pip/_vendor/requests/auth.py,sha256=QB2-cSUj1jrvWZfPXttsZpyAacQgtKLVk14vQW9TpSE,10206
|
||||
pip/_vendor/requests/certs.py,sha256=nXRVq9DtGmv_1AYbwjTu9UrgAcdJv05ZvkNeaoLOZxY,465
|
||||
pip/_vendor/requests/compat.py,sha256=FZX4Q_EMKiMnhZpZ3g_gOsT-j2ca9ij2gehDx1cwYeo,1941
|
||||
pip/_vendor/requests/cookies.py,sha256=Y-bKX6TvW3FnYlE6Au0SXtVVWcaNdFvuAwQxw-G0iTI,18430
|
||||
pip/_vendor/requests/exceptions.py,sha256=-mLam3TAx80V09EaH3H-ZxR61eAVuLRZ8zgBBSLjK44,3197
|
||||
pip/_vendor/requests/help.py,sha256=SJPVcoXeo7KfK4AxJN5eFVQCjr0im87tU2n7ubLsksU,3578
|
||||
pip/_vendor/requests/hooks.py,sha256=QReGyy0bRcr5rkwCuObNakbYsc7EkiKeBwG4qHekr2Q,757
|
||||
pip/_vendor/requests/models.py,sha256=6s-37iAqXVptq8z7U_LoH_pbIPrCQUm_Z8QuIGE29Q0,34275
|
||||
pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695
|
||||
pip/_vendor/requests/sessions.py,sha256=DjbCotDW6xSAaBsjbW-L8l4N0UcwmrxVNgSrZgIjGWM,29332
|
||||
pip/_vendor/requests/status_codes.py,sha256=XWlcpBjbCtq9sSqpH9_KKxgnLTf9Z__wCWolq21ySlg,4129
|
||||
pip/_vendor/requests/structures.py,sha256=zoP8qly2Jak5e89HwpqjN1z2diztI-_gaqts1raJJBc,2981
|
||||
pip/_vendor/requests/utils.py,sha256=LtPJ1db6mJff2TJSJWKi7rBpzjPS3mSOrjC9zRhoD3A,30049
|
||||
pip/_vendor/urllib3/__init__.py,sha256=--dxP-3k5qC8gGCQJbU_jJK666_rbCduadrwRB25wZg,2683
|
||||
pip/_vendor/urllib3/_collections.py,sha256=GouVsNzwg6jADZTmimMI6oqmwKSswnMo9dh5tGNVWO4,10792
|
||||
pip/_vendor/urllib3/connection.py,sha256=JaGozqRdvNogTwHDGxbp2N3Hi2MtJQrkbr7b5qcBGXk,15168
|
||||
pip/_vendor/urllib3/connectionpool.py,sha256=2RPMZJU_PhkAbY1tvy3-W_9os4Kdk_XXu8Zi6YSCgSU,36488
|
||||
pip/_vendor/urllib3/exceptions.py,sha256=P3e-p9_LScyIxX7FoR3wU0A6hZmDqFAVCz2wgI3D0lM,6607
|
||||
pip/_vendor/urllib3/fields.py,sha256=kroD76QK-GdHHW7f_AUN4XxDC3OQPI2FFrS9eSL4BCs,8553
|
||||
pip/_vendor/urllib3/filepost.py,sha256=vj0qbrpT1AFzvvW4SuC8M5kJiw7wftHcSr-7b8UpPpw,2440
|
||||
pip/_vendor/urllib3/poolmanager.py,sha256=JYUyBUN3IiEknUdjZ7VJrpCQr6SP7vi0WwSndrn8XpE,17053
|
||||
pip/_vendor/urllib3/request.py,sha256=hhoHvEEatyd9Tn5EbGjQ0emn-ENMCyY591yNWTneINA,6018
|
||||
pip/_vendor/urllib3/response.py,sha256=O2DVzBeWOzyxZDZ8k0EDFU3GW1jWXk_b03mS0O1ybxs,27836
|
||||
pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=PCxFG7RoB-AOkIWQWGBIg1yZnK0dwPxWcNx7BTpZFBI,909
|
||||
pip/_vendor/urllib3/contrib/appengine.py,sha256=gfdK4T7CRin7v9HRhHDbDh-Hbk66hHDWeoz7nV3PJo8,11034
|
||||
pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=a402AwGN_Ll3N-4ur_AS6UrU-ycUtlnYqoBF76lORg8,4160
|
||||
pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=w35mWy_1POZUsbOhurVb_zhf0C1Jkd79AFlucLs6KuQ,16440
|
||||
pip/_vendor/urllib3/contrib/securetransport.py,sha256=iKzVUAxKnChsADR5YMwc05oEixXDzAk0xPU0g-rc2z8,32275
|
||||
pip/_vendor/urllib3/contrib/socks.py,sha256=nzDMgDIFJWVubKHqvIn2-SKCO91hhJInP92WgHChGzA,7036
|
||||
pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=mullWYFaghBdRWla6HYU-TBgFRTPLBEfxj3jplbeJmQ,16886
|
||||
pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=V7GnujxnWZh2N2sMsV5N4d9Imymokkm3zBwgt77_bSE,11956
|
||||
pip/_vendor/urllib3/packages/__init__.py,sha256=h4BLhD4tLaBx1adaDtKXfupsgqY0wWLXb_f1_yVlV6A,108
|
||||
pip/_vendor/urllib3/packages/six.py,sha256=adx4z-eM_D0Vvu0IIqVzFACQ_ux9l64y7DkSEfbxCDs,32536
|
||||
pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pip/_vendor/urllib3/packages/backports/makefile.py,sha256=005wrvH-_pWSnTFqQ2sdzzh4zVCtQUUQ4mR2Yyxwc0A,1418
|
||||
pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py,sha256=ywgKMtfHi1-DrXlzPfVAhzsLzzqcK7GT6eLgdode1Fg,688
|
||||
pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=rvQDQviqQLtPJB6MfEgABnBFj3nXft7ZJ3Dx-BC0AQY,5696
|
||||
pip/_vendor/urllib3/util/__init__.py,sha256=bWNaav_OT-1L7-sxm59cGb59rDORlbhb_4noduM5m0U,1038
|
||||
pip/_vendor/urllib3/util/connection.py,sha256=NsxUAKQ98GKywta--zg57CdVpeTCI6N-GElCq78Dl8U,4637
|
||||
pip/_vendor/urllib3/util/queue.py,sha256=myTX3JDHntglKQNBf3b6dasHH-uF-W59vzGSQiFdAfI,497
|
||||
pip/_vendor/urllib3/util/request.py,sha256=C-6-AWffxZG03AdRGoY59uqsn4CVItKU6gjxz7Hc3Mc,3815
|
||||
pip/_vendor/urllib3/util/response.py,sha256=_WbTQr8xRQuJuY2rTIZxVdJD6mnEOtQupjaK_bF_Vj8,2573
|
||||
pip/_vendor/urllib3/util/retry.py,sha256=Ui74h44gLIIWkAxT9SK3A2mEvu55-odWgJMw3LiUNGk,15450
|
||||
pip/_vendor/urllib3/util/ssl_.py,sha256=7mB3AsidIqLLq6gbeBL-7Ta0MyVOL5uZax8_5bH3y7c,14163
|
||||
pip/_vendor/urllib3/util/timeout.py,sha256=bCtaS_xVKaTDJ5VMlroXBfCnPUDNVGZqik7-z83issg,9871
|
||||
pip/_vendor/urllib3/util/url.py,sha256=jXDEENCiE7gZPFcCMxTCcNjkQw6nbpgqSuIUPrS07FI,14113
|
||||
pip/_vendor/urllib3/util/wait.py,sha256=k46KzqIYu3Vnzla5YW3EvtInNlU_QycFqQAghIOxoAg,5406
|
||||
pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579
|
||||
pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979
|
||||
pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305
|
||||
pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563
|
||||
pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307
|
||||
pip-20.0.2.dist-info/LICENSE.txt,sha256=W6Ifuwlk-TatfRU2LR7W1JMcyMj5_y1NkRkOEJvnRDE,1090
|
||||
pip-20.0.2.dist-info/METADATA,sha256=MSgjT2JTt8usp4Hopp5AGEmc-7sKR2Jd7HTMJqCoRhw,3352
|
||||
pip-20.0.2.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110
|
||||
pip-20.0.2.dist-info/entry_points.txt,sha256=HtfDOwpUlr9s73jqLQ6wF9V0_0qvUXJwCBz7Vwx0Ue0,125
|
||||
pip-20.0.2.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
pip-20.0.2.dist-info/RECORD,,
|
||||
pip/_internal/req/req_file.cpython-38.pyc,,
|
||||
pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__,,
|
||||
pip/_vendor/distlib/version.cpython-38.pyc,,
|
||||
pip/_vendor/chardet/cli/chardetect.cpython-38.pyc,,
|
||||
pip/_internal/index/__pycache__,,
|
||||
pip/_vendor/html5lib/treewalkers/dom.cpython-38.pyc,,
|
||||
pip/_vendor/pep517/__pycache__,,
|
||||
pip/_vendor/urllib3/exceptions.cpython-38.pyc,,
|
||||
pip/_vendor/pep517/dirtools.cpython-38.pyc,,
|
||||
pip/_vendor/cachecontrol/adapter.cpython-38.pyc,,
|
||||
pip/_vendor/chardet/langturkishmodel.cpython-38.pyc,,
|
||||
pip/_vendor/urllib3/contrib/pyopenssl.cpython-38.pyc,,
|
||||
pip/_vendor/html5lib/treewalkers/__pycache__,,
|
||||
pip/_internal/network/utils.cpython-38.pyc,,
|
||||
pip/_vendor/pytoml/__pycache__,,
|
||||
pip/_vendor/colorama/win32.cpython-38.pyc,,
|
||||
pip/_internal/models/candidate.cpython-38.pyc,,
|
||||
../../../bin/pip3,,
|
||||
pip/_vendor/chardet/sbcsgroupprober.cpython-38.pyc,,
|
||||
pip/_vendor/urllib3/util/url.cpython-38.pyc,,
|
||||
pip/_internal/distributions/installed.cpython-38.pyc,,
|
||||
pip/_vendor/html5lib/treebuilders/__init__.cpython-38.pyc,,
|
||||
pip/_vendor/requests/api.cpython-38.pyc,,
|
||||
pip/_vendor/cachecontrol/controller.cpython-38.pyc,,
|
||||
pip/_vendor/requests/_internal_utils.cpython-38.pyc,,
|
||||
pip/_vendor/idna/__pycache__,,
|
||||
pip/_vendor/requests/certs.cpython-38.pyc,,
|
||||
pip/_internal/utils/logging.cpython-38.pyc,,
|
||||
pip/_internal/operations/install/wheel.cpython-38.pyc,,
|
||||
pip/_internal/operations/build/wheel.cpython-38.pyc,,
|
||||
pip/_vendor/html5lib/treebuilders/base.cpython-38.pyc,,
|
||||
pip/_vendor/urllib3/util/timeout.cpython-38.pyc,,
|
||||
pip/_internal/commands/list.cpython-38.pyc,,
|
||||
pip/_vendor/requests/auth.cpython-38.pyc,,
|
||||
pip/_internal/legacy_resolve.cpython-38.pyc,,
|
||||
pip/_vendor/requests/packages.cpython-38.pyc,,
|
||||
pip/_vendor/idna/uts46data.cpython-38.pyc,,
|
||||
pip/_vendor/chardet/big5freq.cpython-38.pyc,,
|
||||
pip/_vendor/six.cpython-38.pyc,,
|
||||
pip/_vendor/cachecontrol/caches/redis_cache.cpython-38.pyc,,
|
||||
pip/_internal/commands/download.cpython-38.pyc,,
|
||||
pip/_vendor/pep517/colorlog.cpython-38.pyc,,
|
||||
pip/_internal/req/__pycache__,,
|
||||
pip/_vendor/distlib/compat.cpython-38.pyc,,
|
||||
pip/_internal/distributions/__init__.cpython-38.pyc,,
|
||||
pip/_vendor/cachecontrol/heuristics.cpython-38.pyc,,
|
||||
pip/_internal/commands/search.cpython-38.pyc,,
|
||||
pip/_internal/operations/freeze.cpython-38.pyc,,
|
||||
pip/_vendor/chardet/langgreekmodel.cpython-38.pyc,,
|
||||
pip/_internal/utils/__init__.cpython-38.pyc,,
|
||||
pip/_internal/vcs/versioncontrol.cpython-38.pyc,,
|
||||
pip/_vendor/requests/cookies.cpython-38.pyc,,
|
||||
pip/_vendor/retrying.cpython-38.pyc,,
|
||||
pip/_vendor/webencodings/__init__.cpython-38.pyc,,
|
||||
pip/_internal/cli/autocompletion.cpython-38.pyc,,
|
||||
pip/_vendor/html5lib/filters/__pycache__,,
|
||||
pip/_vendor/html5lib/filters/inject_meta_charset.cpython-38.pyc,,
|
||||
pip/_internal/operations/check.cpython-38.pyc,,
|
||||
pip/_internal/index/package_finder.cpython-38.pyc,,
|
||||
pip/_internal/commands/freeze.cpython-38.pyc,,
|
||||
pip/_vendor/pytoml/utils.cpython-38.pyc,,
|
||||
pip/_vendor/colorama/__init__.cpython-38.pyc,,
|
||||
pip/_vendor/html5lib/filters/optionaltags.cpython-38.pyc,,
|
||||
pip/_vendor/chardet/escsm.cpython-38.pyc,,
|
||||
pip/_internal/utils/compat.cpython-38.pyc,,
|
||||
pip/_vendor/urllib3/contrib/_securetransport/__pycache__,,
|
||||
pip/_internal/commands/debug.cpython-38.pyc,,
|
||||
pip/_internal/utils/filetypes.cpython-38.pyc,,
|
||||
pip/_vendor/urllib3/contrib/socks.cpython-38.pyc,,
|
||||
pip/_vendor/packaging/__about__.cpython-38.pyc,,
|
||||
pip/_internal/cli/parser.cpython-38.pyc,,
|
||||
pip/_vendor/urllib3/packages/six.cpython-38.pyc,,
|
||||
pip/_internal/commands/check.cpython-38.pyc,,
|
||||
pip/_vendor/progress/bar.cpython-38.pyc,,
|
||||
pip/_vendor/chardet/codingstatemachine.cpython-38.pyc,,
|
||||
pip/_internal/network/cache.cpython-38.pyc,,
|
||||
pip/_internal/main.cpython-38.pyc,,
|
||||
pip/_vendor/packaging/version.cpython-38.pyc,,
|
||||
pip/_vendor/html5lib/treebuilders/dom.cpython-38.pyc,,
|
||||
pip/_internal/distributions/sdist.cpython-38.pyc,,
|
||||
pip/_vendor/pytoml/__init__.cpython-38.pyc,,
|
||||
pip/_vendor/pep517/compat.cpython-38.pyc,,
|
||||
pip/_vendor/chardet/hebrewprober.cpython-38.pyc,,
|
||||
pip/_vendor/requests/structures.cpython-38.pyc,,
|
||||
pip/_vendor/html5lib/treebuilders/__pycache__,,
|
||||
pip/_internal/utils/setuptools_build.cpython-38.pyc,,
|
||||
pip/_internal/commands/configuration.cpython-38.pyc,,
|
||||
pip/_vendor/html5lib/treewalkers/base.cpython-38.pyc,,
|
||||
pip/_vendor/pytoml/core.cpython-38.pyc,,
|
||||
pip/_vendor/chardet/charsetprober.cpython-38.pyc,,
pip/_vendor/idna/__init__.cpython-38.pyc,,
pip/_internal/utils/entrypoints.cpython-38.pyc,,
pip/_vendor/html5lib/filters/whitespace.cpython-38.pyc,,
pip/_vendor/pytoml/writer.cpython-38.pyc,,
pip/_vendor/html5lib/serializer.cpython-38.pyc,,
pip/_vendor/requests/sessions.cpython-38.pyc,,
pip/_internal/cache.cpython-38.pyc,,
pip/_vendor/html5lib/treeadapters/genshi.cpython-38.pyc,,
pip/_vendor/idna/compat.cpython-38.pyc,,
pip/_vendor/packaging/requirements.cpython-38.pyc,,
pip/_vendor/idna/core.cpython-38.pyc,,
pip/_vendor/distlib/index.cpython-38.pyc,,
pip/_vendor/pep517/_in_process.cpython-38.pyc,,
pip/_vendor/html5lib/_ihatexml.cpython-38.pyc,,
pip/_internal/wheel_builder.cpython-38.pyc,,
pip/_vendor/urllib3/contrib/_appengine_environ.cpython-38.pyc,,
pip/_vendor/colorama/winterm.cpython-38.pyc,,
pip/_vendor/urllib3/poolmanager.cpython-38.pyc,,
pip/_vendor/distlib/resources.cpython-38.pyc,,
pip/_internal/models/selection_prefs.cpython-38.pyc,,
pip/_vendor/chardet/euckrfreq.cpython-38.pyc,,
pip/_internal/distributions/__pycache__,,
pip/_vendor/urllib3/contrib/securetransport.cpython-38.pyc,,
pip/_vendor/chardet/jpcntx.cpython-38.pyc,,
pip/_internal/utils/__pycache__,,
pip/_internal/commands/wheel.cpython-38.pyc,,
pip/_internal/utils/filesystem.cpython-38.pyc,,
pip/_internal/models/target_python.cpython-38.pyc,,
pip/_vendor/webencodings/__pycache__,,
pip/_vendor/requests/__version__.cpython-38.pyc,,
pip/_internal/cli/__init__.cpython-38.pyc,,
pip/_vendor/html5lib/treewalkers/etree.cpython-38.pyc,,
pip/_internal/models/wheel.cpython-38.pyc,,
pip/_internal/index/collector.cpython-38.pyc,,
pip/_vendor/colorama/__pycache__,,
pip/_vendor/urllib3/util/__init__.cpython-38.pyc,,
pip/_vendor/html5lib/filters/__init__.cpython-38.pyc,,
pip/_vendor/colorama/initialise.cpython-38.pyc,,
pip/_vendor/requests/__init__.cpython-38.pyc,,
pip/_vendor/chardet/gb2312prober.cpython-38.pyc,,
pip/_vendor/pytoml/test.cpython-38.pyc,,
pip/_vendor/cachecontrol/wrapper.cpython-38.pyc,,
pip/_internal/utils/encoding.cpython-38.pyc,,
pip/_vendor/urllib3/contrib/_securetransport/__init__.cpython-38.pyc,,
pip/_vendor/cachecontrol/serialize.cpython-38.pyc,,
pip/_internal/models/format_control.cpython-38.pyc,,
pip/_internal/operations/build/metadata.cpython-38.pyc,,
pip/_vendor/html5lib/filters/base.cpython-38.pyc,,
pip/_vendor/msgpack/exceptions.cpython-38.pyc,,
pip/_vendor/html5lib/constants.cpython-38.pyc,,
pip/_vendor/distlib/_backport/shutil.cpython-38.pyc,,
pip/_vendor/chardet/charsetgroupprober.cpython-38.pyc,,
pip/_internal/cli/main.cpython-38.pyc,,
pip/_internal/utils/virtualenv.cpython-38.pyc,,
pip/_internal/vcs/bazaar.cpython-38.pyc,,
pip/_vendor/urllib3/util/connection.cpython-38.pyc,,
pip/_vendor/ipaddress.cpython-38.pyc,,
pip/_vendor/colorama/ansitowin32.cpython-38.pyc,,
pip/_internal/network/xmlrpc.cpython-38.pyc,,
pip/_vendor/chardet/euckrprober.cpython-38.pyc,,
pip/_internal/cli/cmdoptions.cpython-38.pyc,,
pip/_vendor/html5lib/treewalkers/etree_lxml.cpython-38.pyc,,
pip/_internal/commands/completion.cpython-38.pyc,,
pip/_vendor/chardet/jisfreq.cpython-38.pyc,,
pip/_vendor/distlib/database.cpython-38.pyc,,
pip/_vendor/chardet/euctwprober.cpython-38.pyc,,
pip/_internal/utils/unpacking.cpython-38.pyc,,
../../../bin/pip,,
pip/_internal/utils/marker_files.cpython-38.pyc,,
pip/_internal/exceptions.cpython-38.pyc,,
pip/_internal/network/download.cpython-38.pyc,,
pip/_internal/cli/req_command.cpython-38.pyc,,
pip/_vendor/chardet/__init__.cpython-38.pyc,,
pip/_vendor/chardet/latin1prober.cpython-38.pyc,,
pip/_internal/vcs/git.cpython-38.pyc,,
pip/__pycache__,,
pip/_vendor/appdirs.cpython-38.pyc,,
pip/_vendor/distlib/_backport/sysconfig.cpython-38.pyc,,
pip/_vendor/packaging/_compat.cpython-38.pyc,,
pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.cpython-38.pyc,,
pip/_internal/commands/__init__.cpython-38.pyc,,
pip/_vendor/chardet/compat.cpython-38.pyc,,
pip/_vendor/html5lib/treewalkers/genshi.cpython-38.pyc,,
pip/_vendor/idna/package_data.cpython-38.pyc,,
pip/_internal/cli/__pycache__,,
pip/_vendor/distlib/_backport/misc.cpython-38.pyc,,
pip/_vendor/cachecontrol/caches/file_cache.cpython-38.pyc,,
pip/_internal/models/__init__.cpython-38.pyc,,
pip/_internal/req/req_tracker.cpython-38.pyc,,
pip/_vendor/urllib3/util/__pycache__,,
pip/_vendor/urllib3/connection.cpython-38.pyc,,
pip/_internal/distributions/base.cpython-38.pyc,,
pip/_internal/commands/install.cpython-38.pyc,,
pip/_internal/utils/urls.cpython-38.pyc,,
pip/_vendor/chardet/escprober.cpython-38.pyc,,
pip/_vendor/requests/__pycache__,,
pip/_vendor/distro.cpython-38.pyc,,
pip/_internal/locations.cpython-38.pyc,,
pip/_vendor/html5lib/_utils.cpython-38.pyc,,
pip/_vendor/chardet/eucjpprober.cpython-38.pyc,,
pip/_vendor/html5lib/treebuilders/etree.cpython-38.pyc,,
pip/_vendor/urllib3/util/wait.cpython-38.pyc,,
pip/_vendor/cachecontrol/__init__.cpython-38.pyc,,
pip/_vendor/distlib/locators.cpython-38.pyc,,
pip/_internal/operations/build/wheel_legacy.cpython-38.pyc,,
pip/_internal/utils/glibc.cpython-38.pyc,,
pip-20.0.2.virtualenv,,
pip/_vendor/urllib3/packages/__init__.cpython-38.pyc,,
pip/_internal/utils/deprecation.cpython-38.pyc,,
pip/_vendor/chardet/sjisprober.cpython-38.pyc,,
pip/_vendor/html5lib/filters/sanitizer.cpython-38.pyc,,
pip/_vendor/cachecontrol/compat.cpython-38.pyc,,
pip/_internal/network/session.cpython-38.pyc,,
pip/_internal/network/auth.cpython-38.pyc,,
pip/_vendor/chardet/euctwfreq.cpython-38.pyc,,
pip/_vendor/html5lib/html5parser.cpython-38.pyc,,
pip/_vendor/cachecontrol/_cmd.cpython-38.pyc,,
pip/_internal/utils/temp_dir.cpython-38.pyc,,
pip/_vendor/cachecontrol/filewrapper.cpython-38.pyc,,
pip/_vendor/distlib/util.cpython-38.pyc,,
pip/_internal/operations/install/__init__.cpython-38.pyc,,
pip/_internal/commands/show.cpython-38.pyc,,
pip/_internal/operations/build/__init__.cpython-38.pyc,,
pip/_vendor/chardet/cli/__init__.cpython-38.pyc,,
pip/_vendor/pkg_resources/__init__.cpython-38.pyc,,
pip/_vendor/cachecontrol/caches/__pycache__,,
pip/__main__.cpython-38.pyc,,
pip/_vendor/urllib3/util/request.cpython-38.pyc,,
pip/_vendor/urllib3/util/retry.cpython-38.pyc,,
pip/_vendor/chardet/chardistribution.cpython-38.pyc,,
pip/_vendor/urllib3/packages/backports/makefile.cpython-38.pyc,,
pip/_vendor/urllib3/util/response.cpython-38.pyc,,
pip/_vendor/chardet/big5prober.cpython-38.pyc,,
pip/_vendor/urllib3/contrib/appengine.cpython-38.pyc,,
pip/_vendor/urllib3/util/ssl_.cpython-38.pyc,,
pip/_vendor/urllib3/__pycache__,,
pip/_vendor/chardet/langhebrewmodel.cpython-38.pyc,,
pip/_vendor/packaging/tags.cpython-38.pyc,,
pip/_vendor/html5lib/treebuilders/etree_lxml.cpython-38.pyc,,
pip/_vendor/msgpack/_version.cpython-38.pyc,,
pip/_internal/configuration.cpython-38.pyc,,
pip/_vendor/urllib3/contrib/_securetransport/bindings.cpython-38.pyc,,
pip/_vendor/progress/__init__.cpython-38.pyc,,
pip/_vendor/chardet/langhungarianmodel.cpython-38.pyc,,
pip/_vendor/chardet/__pycache__,,
pip/_vendor/idna/codec.cpython-38.pyc,,
pip/_vendor/chardet/mbcsgroupprober.cpython-38.pyc,,
pip/_vendor/__pycache__,,
pip/_vendor/urllib3/packages/backports/__pycache__,,
pip/_vendor/pep517/check.cpython-38.pyc,,
pip/_internal/commands/__pycache__,,
pip/_vendor/progress/spinner.cpython-38.pyc,,
pip/__init__.cpython-38.pyc,,
pip/_vendor/requests/utils.cpython-38.pyc,,
pip/_vendor/chardet/mbcssm.cpython-38.pyc,,
pip/_vendor/distlib/_backport/__pycache__,,
pip/_vendor/cachecontrol/cache.cpython-38.pyc,,
pip/_internal/models/__pycache__,,
pip/_vendor/idna/intranges.cpython-38.pyc,,
pip/_vendor/distlib/wheel.cpython-38.pyc,,
pip/_vendor/requests/help.cpython-38.pyc,,
pip/_internal/cli/main_parser.cpython-38.pyc,,
pip/_internal/req/req_install.cpython-38.pyc,,
pip/_vendor/webencodings/x_user_defined.cpython-38.pyc,,
pip/_vendor/html5lib/_trie/datrie.cpython-38.pyc,,
pip/_vendor/chardet/langcyrillicmodel.cpython-38.pyc,,
pip/_vendor/requests/hooks.cpython-38.pyc,,
pip/_internal/utils/hashes.cpython-38.pyc,,
pip/_vendor/urllib3/request.cpython-38.pyc,,
pip/_vendor/cachecontrol/__pycache__,,
pip/_vendor/requests/adapters.cpython-38.pyc,,
pip/_vendor/urllib3/response.cpython-38.pyc,,
pip/_vendor/certifi/__pycache__,,
pip/_vendor/chardet/version.cpython-38.pyc,,
pip/_vendor/urllib3/packages/__pycache__,,
pip/_vendor/html5lib/treeadapters/__init__.cpython-38.pyc,,
pip/_vendor/requests/compat.cpython-38.pyc,,
pip/_internal/vcs/mercurial.cpython-38.pyc,,
pip/_internal/utils/wheel.cpython-38.pyc,,
pip/_internal/self_outdated_check.cpython-38.pyc,,
pip/_vendor/distlib/markers.cpython-38.pyc,,
pip/_internal/utils/models.cpython-38.pyc,,
pip/_internal/utils/inject_securetransport.cpython-38.pyc,,
pip/_vendor/urllib3/_collections.cpython-38.pyc,,
pip/_vendor/packaging/specifiers.cpython-38.pyc,,
pip/_internal/operations/install/__pycache__,,
../../../bin/pip-3.8,,
pip/_vendor/distlib/scripts.cpython-38.pyc,,
pip/_internal/operations/build/__pycache__,,
pip/_vendor/chardet/cli/__pycache__,,
pip/_vendor/pkg_resources/__pycache__,,
pip/_vendor/pkg_resources/py31compat.cpython-38.pyc,,
pip/_vendor/urllib3/contrib/_securetransport/low_level.cpython-38.pyc,,
pip/_vendor/urllib3/connectionpool.cpython-38.pyc,,
pip/_vendor/pep517/build.cpython-38.pyc,,
pip/_vendor/cachecontrol/caches/__init__.cpython-38.pyc,,
pip/_vendor/chardet/langthaimodel.cpython-38.pyc,,
pip/_vendor/html5lib/__pycache__,,
pip/_vendor/html5lib/filters/alphabeticalattributes.cpython-38.pyc,,
pip/_internal/cli/base_command.cpython-38.pyc,,
pip/_vendor/progress/counter.cpython-38.pyc,,
pip/_vendor/urllib3/__init__.cpython-38.pyc,,
pip/_vendor/progress/__pycache__,,
pip/_internal/operations/install/editable_legacy.cpython-38.pyc,,
pip/_internal/operations/install/legacy.cpython-38.pyc,,
pip/_vendor/urllib3/filepost.cpython-38.pyc,,
pip/_vendor/idna/idnadata.cpython-38.pyc,,
pip/_vendor/contextlib2.cpython-38.pyc,,
pip/_internal/vcs/subversion.cpython-38.pyc,,
pip/_internal/operations/__init__.cpython-38.pyc,,
pip/_vendor/html5lib/_trie/py.cpython-38.pyc,,
pip/_vendor/msgpack/__init__.cpython-38.pyc,,
pip/_internal/network/__init__.cpython-38.pyc,,
pip/_vendor/pytoml/parser.cpython-38.pyc,,
pip/_internal/commands/help.cpython-38.pyc,,
pip/_vendor/html5lib/_trie/__init__.cpython-38.pyc,,
pip/_vendor/__init__.cpython-38.pyc,,
pip/_vendor/urllib3/packages/backports/__init__.cpython-38.pyc,,
pip/_internal/models/search_scope.cpython-38.pyc,,
pip/_vendor/certifi/__main__.cpython-38.pyc,,
pip/_vendor/chardet/gb2312freq.cpython-38.pyc,,
pip-20.0.2.dist-info/INSTALLER,,
pip/_vendor/distlib/_backport/__init__.cpython-38.pyc,,
pip/_vendor/chardet/sbcharsetprober.cpython-38.pyc,,
pip/_internal/vcs/__init__.cpython-38.pyc,,
pip/_vendor/distlib/__init__.cpython-38.pyc,,
pip/_internal/req/constructors.cpython-38.pyc,,
pip/_internal/operations/prepare.cpython-38.pyc,,
pip/_vendor/packaging/_typing.cpython-38.pyc,,
pip/_vendor/pyparsing.cpython-38.pyc,,
pip/_vendor/chardet/utf8prober.cpython-38.pyc,,
pip/_vendor/urllib3/contrib/__init__.cpython-38.pyc,,
pip/_vendor/packaging/__pycache__,,
pip/_internal/__init__.cpython-38.pyc,,
pip/_vendor/chardet/universaldetector.cpython-38.pyc,,
pip/_vendor/webencodings/tests.cpython-38.pyc,,
pip/_vendor/html5lib/_inputstream.cpython-38.pyc,,
pip/_vendor/distlib/_backport/tarfile.cpython-38.pyc,,
pip/_internal/operations/build/metadata_legacy.cpython-38.pyc,,
pip/_vendor/certifi/__init__.cpython-38.pyc,,
pip/_vendor/pep517/envbuild.cpython-38.pyc,,
pip/_vendor/html5lib/treeadapters/__pycache__,,
pip/_internal/utils/appdirs.cpython-38.pyc,,
pip/_internal/models/scheme.cpython-38.pyc,,
pip/_internal/utils/misc.cpython-38.pyc,,
pip/_internal/cli/command_context.cpython-38.pyc,,
pip/_internal/req/req_uninstall.cpython-38.pyc,,
pip/_vendor/certifi/core.cpython-38.pyc,,
pip/_internal/utils/ui.cpython-38.pyc,,
pip/_internal/cli/status_codes.cpython-38.pyc,,
pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.cpython-38.pyc,,
pip/_vendor/colorama/ansi.cpython-38.pyc,,
pip/_internal/index/__init__.cpython-38.pyc,,
pip/_vendor/chardet/langbulgarianmodel.cpython-38.pyc,,
pip/_vendor/webencodings/labels.cpython-38.pyc,,
pip/_vendor/pep517/wrappers.cpython-38.pyc,,
pip/_vendor/pep517/__init__.cpython-38.pyc,,
pip/_vendor/requests/status_codes.cpython-38.pyc,,
pip/_vendor/packaging/markers.cpython-38.pyc,,
pip/_internal/commands/hash.cpython-38.pyc,,
pip/_vendor/html5lib/treewalkers/__init__.cpython-38.pyc,,
pip/_vendor/requests/models.cpython-38.pyc,,
pip/_internal/build_env.cpython-38.pyc,,
pip/_vendor/chardet/cp949prober.cpython-38.pyc,,
pip/_vendor/html5lib/__init__.cpython-38.pyc,,
../../../bin/pip3.8,,
pip/_vendor/distlib/metadata.cpython-38.pyc,,
pip/_vendor/pep517/meta.cpython-38.pyc,,
pip/_vendor/html5lib/_trie/_base.cpython-38.pyc,,
pip/_internal/utils/subprocess.cpython-38.pyc,,
pip/_internal/operations/__pycache__,,
pip/_vendor/html5lib/_tokenizer.cpython-38.pyc,,
pip/_vendor/msgpack/__pycache__,,
pip/_internal/network/__pycache__,,
pip/_vendor/packaging/_structures.cpython-38.pyc,,
pip/_vendor/html5lib/_trie/__pycache__,,
pip/_internal/commands/uninstall.cpython-38.pyc,,
pip/_internal/utils/typing.cpython-38.pyc,,
pip/_vendor/chardet/enums.cpython-38.pyc,,
pip/_vendor/packaging/utils.cpython-38.pyc,,
pip/_vendor/requests/exceptions.cpython-38.pyc,,
pip/_internal/vcs/__pycache__,,
pip/_vendor/distlib/__pycache__,,
pip/_internal/utils/pkg_resources.cpython-38.pyc,,
pip/_internal/pep425tags.cpython-38.pyc,,
pip/_internal/models/index.cpython-38.pyc,,
pip/_vendor/urllib3/contrib/__pycache__,,
pip/_vendor/html5lib/filters/lint.cpython-38.pyc,,
pip/_internal/utils/distutils_args.cpython-38.pyc,,
pip/_vendor/urllib3/fields.cpython-38.pyc,,
pip/_vendor/packaging/__init__.cpython-38.pyc,,
pip/_vendor/webencodings/mklabels.cpython-38.pyc,,
pip/_internal/__pycache__,,
pip/_internal/req/__init__.cpython-38.pyc,,
pip/_vendor/chardet/mbcharsetprober.cpython-38.pyc,,
pip/_internal/req/req_set.cpython-38.pyc,,
pip/_internal/distributions/wheel.cpython-38.pyc,,
pip/_vendor/distlib/manifest.cpython-38.pyc,,
pip/_vendor/urllib3/contrib/ntlmpool.cpython-38.pyc,,
pip/_vendor/msgpack/fallback.cpython-38.pyc,,
pip/_internal/pyproject.cpython-38.pyc,,
pip/_vendor/html5lib/treeadapters/sax.cpython-38.pyc,,
pip/_internal/utils/packaging.cpython-38.pyc,,
pip-20.0.2.dist-info/__pycache__,,
pip/_internal/models/link.cpython-38.pyc,,
pip/_vendor/urllib3/util/queue.cpython-38.pyc,,
@ -1,6 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.33.6)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any
@ -1,5 +0,0 @@
[console_scripts]
pip = pip._internal.cli.main:main
pip3 = pip._internal.cli.main:main
pip3.8 = pip._internal.cli.main:main
@ -1 +0,0 @@
pip
Some files were not shown because too many files have changed in this diff.