Add existing file
This commit is contained in:
parent 4925e10445
commit e3578b866a
68 .bash_history Normal file
@@ -0,0 +1,68 @@
sudo apt-get update
sudo apt-get upgrade
sudo apt-get dist-upgrade
stack install
sudo apt install haskell-stack
pip install kaggle
sudo apt install python3-pip
pip install kaggle
pip install pandas
kaggle datasets list -s new york
kaggle -h
kaggle
-h kaggle
help kaggle
info kaggle
help help
kaggle datasets
mkdir
mkdir --help
cd ~/.kaggle
cd ~/
ls
ls korne
cd Kornelia Girejko
cd C:\Users\korne\.kaggle
cd C:/Users/korne/.kaggle
cd ~/.kaggle
cd ~\.kaggle
cd home
cd nela
ls
ls -r
cd ..
cd nela
ls
ls
git
git remote add origin https://git.wmi.amu.edu.pl/s478815/ium_478815.git
touch README.md
git init
git add README.md
git commit -m "first commit"
git remote add origin https://git.wmi.amu.edu.pl/s478815/ium_478815.git
git push -u origin master
touch README.md
git init
git add README.md
git commit -m "first commit"
git config --global user.email "korgir@st.amu.edu.pl"
git config --global user.name "Kornelia"
git remote add origin https://git.wmi.amu.edu.pl/s478815/ium_478815.git
git push -u origin master
sudo git install
git --help
sudo apt install git-all
sudo apt install git
git remote add origin https://git.wmi.amu.edu.pl/s478815/ium_478815.git
git push -u origin master
git commit -m "first commit"
git add README.md
git remote add origin https://git.wmi.amu.edu.pl/s478815/ium_478815.git
git push -u origin master
virtualenv environment
wsl --set-default-version 2
dism.exe /online /enable-feature /featurename:Microsoft-Windows-Subsystem-Linux /all /norestart
docker run -d -p 80:80 docker/getting-started
docker build .
docker
7 .bash_logout Normal file
@@ -0,0 +1,7 @@
# ~/.bash_logout: executed by bash(1) when login shell exits.

# when leaving the console clear the screen to increase privacy

if [ "$SHLVL" = 1 ]; then
    [ -x /usr/bin/clear_console ] && /usr/bin/clear_console -q
fi
117 .bashrc Normal file
@@ -0,0 +1,117 @@
# ~/.bashrc: executed by bash(1) for non-login shells.
# see /usr/share/doc/bash/examples/startup-files (in the package bash-doc)
# for examples

# If not running interactively, don't do anything
case $- in
    *i*) ;;
      *) return;;
esac

# don't put duplicate lines or lines starting with space in the history.
# See bash(1) for more options
HISTCONTROL=ignoreboth

# append to the history file, don't overwrite it
shopt -s histappend

# for setting history length see HISTSIZE and HISTFILESIZE in bash(1)
HISTSIZE=1000
HISTFILESIZE=2000

# check the window size after each command and, if necessary,
# update the values of LINES and COLUMNS.
shopt -s checkwinsize

# If set, the pattern "**" used in a pathname expansion context will
# match all files and zero or more directories and subdirectories.
#shopt -s globstar

# make less more friendly for non-text input files, see lesspipe(1)
[ -x /usr/bin/lesspipe ] && eval "$(SHELL=/bin/sh lesspipe)"

# set variable identifying the chroot you work in (used in the prompt below)
if [ -z "${debian_chroot:-}" ] && [ -r /etc/debian_chroot ]; then
    debian_chroot=$(cat /etc/debian_chroot)
fi

# set a fancy prompt (non-color, unless we know we "want" color)
case "$TERM" in
    xterm-color|*-256color) color_prompt=yes;;
esac

# uncomment for a colored prompt, if the terminal has the capability; turned
# off by default to not distract the user: the focus in a terminal window
# should be on the output of commands, not on the prompt
#force_color_prompt=yes

if [ -n "$force_color_prompt" ]; then
    if [ -x /usr/bin/tput ] && tput setaf 1 >&/dev/null; then
        # We have color support; assume it's compliant with Ecma-48
        # (ISO/IEC-6429). (Lack of such support is extremely rare, and such
        # a case would tend to support setf rather than setaf.)
        color_prompt=yes
    else
        color_prompt=
    fi
fi

if [ "$color_prompt" = yes ]; then
    PS1='${debian_chroot:+($debian_chroot)}\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '
else
    PS1='${debian_chroot:+($debian_chroot)}\u@\h:\w\$ '
fi
unset color_prompt force_color_prompt

# If this is an xterm set the title to user@host:dir
case "$TERM" in
xterm*|rxvt*)
    PS1="\[\e]0;${debian_chroot:+($debian_chroot)}\u@\h: \w\a\]$PS1"
    ;;
*)
    ;;
esac

# enable color support of ls and also add handy aliases
if [ -x /usr/bin/dircolors ]; then
    test -r ~/.dircolors && eval "$(dircolors -b ~/.dircolors)" || eval "$(dircolors -b)"
    alias ls='ls --color=auto'
    #alias dir='dir --color=auto'
    #alias vdir='vdir --color=auto'

    alias grep='grep --color=auto'
    alias fgrep='fgrep --color=auto'
    alias egrep='egrep --color=auto'
fi

# colored GCC warnings and errors
#export GCC_COLORS='error=01;31:warning=01;35:note=01;36:caret=01;32:locus=01:quote=01'

# some more ls aliases
alias ll='ls -alF'
alias la='ls -A'
alias l='ls -CF'

# Add an "alert" alias for long running commands. Use like so:
#   sleep 10; alert
alias alert='notify-send --urgency=low -i "$([ $? = 0 ] && echo terminal || echo error)" "$(history|tail -n1|sed -e '\''s/^\s*[0-9]\+\s*//;s/[;&|]\s*alert$//'\'')"'

# Alias definitions.
# You may want to put all your additions into a separate file like
# ~/.bash_aliases, instead of adding them here directly.
# See /usr/share/doc/bash-doc/examples in the bash-doc package.

if [ -f ~/.bash_aliases ]; then
    . ~/.bash_aliases
fi

# enable programmable completion features (you don't need to enable
# this, if it's already enabled in /etc/bash.bashrc and /etc/profile
# sources /etc/bash.bashrc).
if ! shopt -oq posix; then
  if [ -f /usr/share/bash-completion/bash_completion ]; then
    . /usr/share/bash-completion/bash_completion
  elif [ -f /etc/bash_completion ]; then
    . /etc/bash_completion
  fi
fi
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
3 .gitconfig Normal file
@@ -0,0 +1,3 @@
[user]
    email = korgir@st.amu.edu.pl
    name = Kornelia
0 .landscape/sysinfo.log Normal file
8 .local/bin/f2py Executable file
@@ -0,0 +1,8 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from numpy.f2py.f2py2e import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
8 .local/bin/f2py3 Executable file
@@ -0,0 +1,8 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from numpy.f2py.f2py2e import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
8 .local/bin/f2py3.8 Executable file
@@ -0,0 +1,8 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from numpy.f2py.f2py2e import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
8 .local/bin/kaggle Executable file
@@ -0,0 +1,8 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from kaggle.cli import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
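The shell history above installs the kaggle CLI and runs "kaggle datasets list -s new york"; the CLI also expects an API token in ~/.kaggle/kaggle.json. A rough Python equivalent is sketched below using the kaggle package's client; the class and method names (KaggleApi, authenticate, dataset_list) are assumptions about the installed kaggle package, not part of this commit, so verify them against the installed version.

from kaggle.api.kaggle_api_extended import KaggleApi

api = KaggleApi()
api.authenticate()  # assumed to read credentials from ~/.kaggle/kaggle.json
for dataset in api.dataset_list(search="new york")[:5]:
    print(dataset.ref)  # e.g. owner/dataset-slug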
8 .local/bin/tqdm Executable file
@@ -0,0 +1,8 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from tqdm.cli import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
8 .local/lib/python3.8/site-packages/dateutil/__init__.py Normal file
@@ -0,0 +1,8 @@
# -*- coding: utf-8 -*-
try:
    from ._version import version as __version__
except ImportError:
    __version__ = 'unknown'

__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz',
           'utils', 'zoneinfo']
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
43 .local/lib/python3.8/site-packages/dateutil/_common.py Normal file
@@ -0,0 +1,43 @@
"""
Common code used in multiple modules.
"""


class weekday(object):
    __slots__ = ["weekday", "n"]

    def __init__(self, weekday, n=None):
        self.weekday = weekday
        self.n = n

    def __call__(self, n):
        if n == self.n:
            return self
        else:
            return self.__class__(self.weekday, n)

    def __eq__(self, other):
        try:
            if self.weekday != other.weekday or self.n != other.n:
                return False
        except AttributeError:
            return False
        return True

    def __hash__(self):
        return hash((
            self.weekday,
            self.n,
        ))

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):
        s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday]
        if not self.n:
            return s
        else:
            return "%s(%+d)" % (s, self.n)

# vim:ts=4:sw=4:et
5 .local/lib/python3.8/site-packages/dateutil/_version.py Normal file
@@ -0,0 +1,5 @@
# coding: utf-8
# file generated by setuptools_scm
# don't change, don't track in version control
version = '2.8.2'
version_tuple = (2, 8, 2)
89 .local/lib/python3.8/site-packages/dateutil/easter.py Normal file
@@ -0,0 +1,89 @@
# -*- coding: utf-8 -*-
"""
This module offers a generic Easter computing method for any given year, using
Western, Orthodox or Julian algorithms.
"""

import datetime

__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"]

EASTER_JULIAN = 1
EASTER_ORTHODOX = 2
EASTER_WESTERN = 3


def easter(year, method=EASTER_WESTERN):
    """
    This method was ported from the work done by GM Arts,
    on top of the algorithm by Claus Tondering, which was
    based in part on the algorithm of Ouding (1940), as
    quoted in "Explanatory Supplement to the Astronomical
    Almanac", P. Kenneth Seidelmann, editor.

    This algorithm implements three different Easter
    calculation methods:

    1. Original calculation in Julian calendar, valid in
       dates after 326 AD
    2. Original method, with date converted to Gregorian
       calendar, valid in years 1583 to 4099
    3. Revised method, in Gregorian calendar, valid in
       years 1583 to 4099 as well

    These methods are represented by the constants:

    * ``EASTER_JULIAN   = 1``
    * ``EASTER_ORTHODOX = 2``
    * ``EASTER_WESTERN  = 3``

    The default method is method 3.

    More about the algorithm may be found at:

    `GM Arts: Easter Algorithms <http://www.gmarts.org/index.php?go=415>`_

    and

    `The Calendar FAQ: Easter <https://www.tondering.dk/claus/cal/easter.php>`_

    """

    if not (1 <= method <= 3):
        raise ValueError("invalid method")

    # g - Golden year - 1
    # c - Century
    # h - (23 - Epact) mod 30
    # i - Number of days from March 21 to Paschal Full Moon
    # j - Weekday for PFM (0=Sunday, etc)
    # p - Number of days from March 21 to Sunday on or before PFM
    #     (-6 to 28 methods 1 & 3, to 56 for method 2)
    # e - Extra days to add for method 2 (converting Julian
    #     date to Gregorian date)

    y = year
    g = y % 19
    e = 0
    if method < 3:
        # Old method
        i = (19*g + 15) % 30
        j = (y + y//4 + i) % 7
        if method == 2:
            # Extra dates to convert Julian to Gregorian date
            e = 10
            if y > 1600:
                e = e + y//100 - 16 - (y//100 - 16)//4
    else:
        # New method
        c = y//100
        h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30
        i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11))
        j = (y + y//4 + i + 2 - c + c//4) % 7

    # p can be from -6 to 56 corresponding to dates 22 March to 23 May
    # (later dates apply to method 2, although 23 May never actually occurs)
    p = i - j + e
    d = 1 + (p + 27 + (p + 6)//40) % 31
    m = 3 + (p + 26)//30
    return datetime.date(int(y), int(m), int(d))
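For orientation, a minimal usage sketch of the easter() function added above (assuming this vendored dateutil 2.8.2 is importable):

from dateutil.easter import easter, EASTER_ORTHODOX

print(easter(2022))                   # 2022-04-17 (Western, default method 3)
print(easter(2022, EASTER_ORTHODOX))  # 2022-04-24 (Orthodox, method 2)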
@@ -0,0 +1,61 @@
# -*- coding: utf-8 -*-
from ._parser import parse, parser, parserinfo, ParserError
from ._parser import DEFAULTPARSER, DEFAULTTZPARSER
from ._parser import UnknownTimezoneWarning

from ._parser import __doc__

from .isoparser import isoparser, isoparse

__all__ = ['parse', 'parser', 'parserinfo',
           'isoparse', 'isoparser',
           'ParserError',
           'UnknownTimezoneWarning']


###
# Deprecate portions of the private interface so that downstream code that
# is improperly relying on it is given *some* notice.


def __deprecated_private_func(f):
    from functools import wraps
    import warnings

    msg = ('{name} is a private function and may break without warning, '
           'it will be moved and or renamed in future versions.')
    msg = msg.format(name=f.__name__)

    @wraps(f)
    def deprecated_func(*args, **kwargs):
        warnings.warn(msg, DeprecationWarning)
        return f(*args, **kwargs)

    return deprecated_func

def __deprecate_private_class(c):
    import warnings

    msg = ('{name} is a private class and may break without warning, '
           'it will be moved and or renamed in future versions.')
    msg = msg.format(name=c.__name__)

    class private_class(c):
        __doc__ = c.__doc__

        def __init__(self, *args, **kwargs):
            warnings.warn(msg, DeprecationWarning)
            super(private_class, self).__init__(*args, **kwargs)

    private_class.__name__ = c.__name__

    return private_class


from ._parser import _timelex, _resultbase
from ._parser import _tzparser, _parsetz

_timelex = __deprecate_private_class(_timelex)
_tzparser = __deprecate_private_class(_tzparser)
_resultbase = __deprecate_private_class(_resultbase)
_parsetz = __deprecated_private_func(_parsetz)
Binary file not shown.
Binary file not shown.
Binary file not shown.
1613 .local/lib/python3.8/site-packages/dateutil/parser/_parser.py Normal file
File diff suppressed because it is too large
416 .local/lib/python3.8/site-packages/dateutil/parser/isoparser.py Normal file
@ -0,0 +1,416 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
This module offers a parser for ISO-8601 strings
|
||||
|
||||
It is intended to support all valid date, time and datetime formats per the
|
||||
ISO-8601 specification.
|
||||
|
||||
..versionadded:: 2.7.0
|
||||
"""
|
||||
from datetime import datetime, timedelta, time, date
|
||||
import calendar
|
||||
from dateutil import tz
|
||||
|
||||
from functools import wraps
|
||||
|
||||
import re
|
||||
import six
|
||||
|
||||
__all__ = ["isoparse", "isoparser"]
|
||||
|
||||
|
||||
def _takes_ascii(f):
|
||||
@wraps(f)
|
||||
def func(self, str_in, *args, **kwargs):
|
||||
# If it's a stream, read the whole thing
|
||||
str_in = getattr(str_in, 'read', lambda: str_in)()
|
||||
|
||||
# If it's unicode, turn it into bytes, since ISO-8601 only covers ASCII
|
||||
if isinstance(str_in, six.text_type):
|
||||
# ASCII is the same in UTF-8
|
||||
try:
|
||||
str_in = str_in.encode('ascii')
|
||||
except UnicodeEncodeError as e:
|
||||
msg = 'ISO-8601 strings should contain only ASCII characters'
|
||||
six.raise_from(ValueError(msg), e)
|
||||
|
||||
return f(self, str_in, *args, **kwargs)
|
||||
|
||||
return func
|
||||
|
||||
|
||||
class isoparser(object):
|
||||
def __init__(self, sep=None):
|
||||
"""
|
||||
:param sep:
|
||||
A single character that separates date and time portions. If
|
||||
``None``, the parser will accept any single character.
|
||||
For strict ISO-8601 adherence, pass ``'T'``.
|
||||
"""
|
||||
if sep is not None:
|
||||
if (len(sep) != 1 or ord(sep) >= 128 or sep in '0123456789'):
|
||||
raise ValueError('Separator must be a single, non-numeric ' +
|
||||
'ASCII character')
|
||||
|
||||
sep = sep.encode('ascii')
|
||||
|
||||
self._sep = sep
|
||||
|
||||
@_takes_ascii
|
||||
def isoparse(self, dt_str):
|
||||
"""
|
||||
Parse an ISO-8601 datetime string into a :class:`datetime.datetime`.
|
||||
|
||||
An ISO-8601 datetime string consists of a date portion, followed
|
||||
optionally by a time portion - the date and time portions are separated
|
||||
by a single character separator, which is ``T`` in the official
|
||||
standard. Incomplete date formats (such as ``YYYY-MM``) may *not* be
|
||||
combined with a time portion.
|
||||
|
||||
Supported date formats are:
|
||||
|
||||
Common:
|
||||
|
||||
- ``YYYY``
|
||||
- ``YYYY-MM`` or ``YYYYMM``
|
||||
- ``YYYY-MM-DD`` or ``YYYYMMDD``
|
||||
|
||||
Uncommon:
|
||||
|
||||
- ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0)
|
||||
- ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day
|
||||
|
||||
The ISO week and day numbering follows the same logic as
|
||||
:func:`datetime.date.isocalendar`.
|
||||
|
||||
Supported time formats are:
|
||||
|
||||
- ``hh``
|
||||
- ``hh:mm`` or ``hhmm``
|
||||
- ``hh:mm:ss`` or ``hhmmss``
|
||||
- ``hh:mm:ss.ssssss`` (Up to 6 sub-second digits)
|
||||
|
||||
Midnight is a special case for `hh`, as the standard supports both
|
||||
00:00 and 24:00 as a representation. The decimal separator can be
|
||||
either a dot or a comma.
|
||||
|
||||
|
||||
.. caution::
|
||||
|
||||
Support for fractional components other than seconds is part of the
|
||||
ISO-8601 standard, but is not currently implemented in this parser.
|
||||
|
||||
Supported time zone offset formats are:
|
||||
|
||||
- `Z` (UTC)
|
||||
- `±HH:MM`
|
||||
- `±HHMM`
|
||||
- `±HH`
|
||||
|
||||
Offsets will be represented as :class:`dateutil.tz.tzoffset` objects,
|
||||
with the exception of UTC, which will be represented as
|
||||
:class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such
|
||||
as `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`.
|
||||
|
||||
:param dt_str:
|
||||
A string or stream containing only an ISO-8601 datetime string
|
||||
|
||||
:return:
|
||||
Returns a :class:`datetime.datetime` representing the string.
|
||||
Unspecified components default to their lowest value.
|
||||
|
||||
.. warning::
|
||||
|
||||
As of version 2.7.0, the strictness of the parser should not be
|
||||
considered a stable part of the contract. Any valid ISO-8601 string
|
||||
that parses correctly with the default settings will continue to
|
||||
parse correctly in future versions, but invalid strings that
|
||||
currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not
|
||||
guaranteed to continue failing in future versions if they encode
|
||||
a valid date.
|
||||
|
||||
.. versionadded:: 2.7.0
|
||||
"""
|
||||
components, pos = self._parse_isodate(dt_str)
|
||||
|
||||
if len(dt_str) > pos:
|
||||
if self._sep is None or dt_str[pos:pos + 1] == self._sep:
|
||||
components += self._parse_isotime(dt_str[pos + 1:])
|
||||
else:
|
||||
raise ValueError('String contains unknown ISO components')
|
||||
|
||||
if len(components) > 3 and components[3] == 24:
|
||||
components[3] = 0
|
||||
return datetime(*components) + timedelta(days=1)
|
||||
|
||||
return datetime(*components)
|
||||
|
||||
@_takes_ascii
|
||||
def parse_isodate(self, datestr):
|
||||
"""
|
||||
Parse the date portion of an ISO string.
|
||||
|
||||
:param datestr:
|
||||
The string portion of an ISO string, without a separator
|
||||
|
||||
:return:
|
||||
Returns a :class:`datetime.date` object
|
||||
"""
|
||||
components, pos = self._parse_isodate(datestr)
|
||||
if pos < len(datestr):
|
||||
raise ValueError('String contains unknown ISO ' +
|
||||
'components: {!r}'.format(datestr.decode('ascii')))
|
||||
return date(*components)
|
||||
|
||||
@_takes_ascii
|
||||
def parse_isotime(self, timestr):
|
||||
"""
|
||||
Parse the time portion of an ISO string.
|
||||
|
||||
:param timestr:
|
||||
The time portion of an ISO string, without a separator
|
||||
|
||||
:return:
|
||||
Returns a :class:`datetime.time` object
|
||||
"""
|
||||
components = self._parse_isotime(timestr)
|
||||
if components[0] == 24:
|
||||
components[0] = 0
|
||||
return time(*components)
|
||||
|
||||
@_takes_ascii
|
||||
def parse_tzstr(self, tzstr, zero_as_utc=True):
|
||||
"""
|
||||
Parse a valid ISO time zone string.
|
||||
|
||||
See :func:`isoparser.isoparse` for details on supported formats.
|
||||
|
||||
:param tzstr:
|
||||
A string representing an ISO time zone offset
|
||||
|
||||
:param zero_as_utc:
|
||||
Whether to return :class:`dateutil.tz.tzutc` for zero-offset zones
|
||||
|
||||
:return:
|
||||
Returns :class:`dateutil.tz.tzoffset` for offsets and
|
||||
:class:`dateutil.tz.tzutc` for ``Z`` and (if ``zero_as_utc`` is
|
||||
specified) offsets equivalent to UTC.
|
||||
"""
|
||||
return self._parse_tzstr(tzstr, zero_as_utc=zero_as_utc)
|
||||
|
||||
# Constants
|
||||
_DATE_SEP = b'-'
|
||||
_TIME_SEP = b':'
|
||||
_FRACTION_REGEX = re.compile(b'[\\.,]([0-9]+)')
|
||||
|
||||
def _parse_isodate(self, dt_str):
|
||||
try:
|
||||
return self._parse_isodate_common(dt_str)
|
||||
except ValueError:
|
||||
return self._parse_isodate_uncommon(dt_str)
|
||||
|
||||
def _parse_isodate_common(self, dt_str):
|
||||
len_str = len(dt_str)
|
||||
components = [1, 1, 1]
|
||||
|
||||
if len_str < 4:
|
||||
raise ValueError('ISO string too short')
|
||||
|
||||
# Year
|
||||
components[0] = int(dt_str[0:4])
|
||||
pos = 4
|
||||
if pos >= len_str:
|
||||
return components, pos
|
||||
|
||||
has_sep = dt_str[pos:pos + 1] == self._DATE_SEP
|
||||
if has_sep:
|
||||
pos += 1
|
||||
|
||||
# Month
|
||||
if len_str - pos < 2:
|
||||
raise ValueError('Invalid common month')
|
||||
|
||||
components[1] = int(dt_str[pos:pos + 2])
|
||||
pos += 2
|
||||
|
||||
if pos >= len_str:
|
||||
if has_sep:
|
||||
return components, pos
|
||||
else:
|
||||
raise ValueError('Invalid ISO format')
|
||||
|
||||
if has_sep:
|
||||
if dt_str[pos:pos + 1] != self._DATE_SEP:
|
||||
raise ValueError('Invalid separator in ISO string')
|
||||
pos += 1
|
||||
|
||||
# Day
|
||||
if len_str - pos < 2:
|
||||
raise ValueError('Invalid common day')
|
||||
components[2] = int(dt_str[pos:pos + 2])
|
||||
return components, pos + 2
|
||||
|
||||
def _parse_isodate_uncommon(self, dt_str):
|
||||
if len(dt_str) < 4:
|
||||
raise ValueError('ISO string too short')
|
||||
|
||||
# All ISO formats start with the year
|
||||
year = int(dt_str[0:4])
|
||||
|
||||
has_sep = dt_str[4:5] == self._DATE_SEP
|
||||
|
||||
pos = 4 + has_sep # Skip '-' if it's there
|
||||
if dt_str[pos:pos + 1] == b'W':
|
||||
# YYYY-?Www-?D?
|
||||
pos += 1
|
||||
weekno = int(dt_str[pos:pos + 2])
|
||||
pos += 2
|
||||
|
||||
dayno = 1
|
||||
if len(dt_str) > pos:
|
||||
if (dt_str[pos:pos + 1] == self._DATE_SEP) != has_sep:
|
||||
raise ValueError('Inconsistent use of dash separator')
|
||||
|
||||
pos += has_sep
|
||||
|
||||
dayno = int(dt_str[pos:pos + 1])
|
||||
pos += 1
|
||||
|
||||
base_date = self._calculate_weekdate(year, weekno, dayno)
|
||||
else:
|
||||
# YYYYDDD or YYYY-DDD
|
||||
if len(dt_str) - pos < 3:
|
||||
raise ValueError('Invalid ordinal day')
|
||||
|
||||
ordinal_day = int(dt_str[pos:pos + 3])
|
||||
pos += 3
|
||||
|
||||
if ordinal_day < 1 or ordinal_day > (365 + calendar.isleap(year)):
|
||||
raise ValueError('Invalid ordinal day' +
|
||||
' {} for year {}'.format(ordinal_day, year))
|
||||
|
||||
base_date = date(year, 1, 1) + timedelta(days=ordinal_day - 1)
|
||||
|
||||
components = [base_date.year, base_date.month, base_date.day]
|
||||
return components, pos
|
||||
|
||||
def _calculate_weekdate(self, year, week, day):
|
||||
"""
|
||||
Calculate the day of corresponding to the ISO year-week-day calendar.
|
||||
|
||||
This function is effectively the inverse of
|
||||
:func:`datetime.date.isocalendar`.
|
||||
|
||||
:param year:
|
||||
The year in the ISO calendar
|
||||
|
||||
:param week:
|
||||
The week in the ISO calendar - range is [1, 53]
|
||||
|
||||
:param day:
|
||||
The day in the ISO calendar - range is [1 (MON), 7 (SUN)]
|
||||
|
||||
:return:
|
||||
Returns a :class:`datetime.date`
|
||||
"""
|
||||
if not 0 < week < 54:
|
||||
raise ValueError('Invalid week: {}'.format(week))
|
||||
|
||||
if not 0 < day < 8: # Range is 1-7
|
||||
raise ValueError('Invalid weekday: {}'.format(day))
|
||||
|
||||
# Get week 1 for the specific year:
|
||||
jan_4 = date(year, 1, 4) # Week 1 always has January 4th in it
|
||||
week_1 = jan_4 - timedelta(days=jan_4.isocalendar()[2] - 1)
|
||||
|
||||
# Now add the specific number of weeks and days to get what we want
|
||||
week_offset = (week - 1) * 7 + (day - 1)
|
||||
return week_1 + timedelta(days=week_offset)
|
||||
|
||||
def _parse_isotime(self, timestr):
|
||||
len_str = len(timestr)
|
||||
components = [0, 0, 0, 0, None]
|
||||
pos = 0
|
||||
comp = -1
|
||||
|
||||
if len_str < 2:
|
||||
raise ValueError('ISO time too short')
|
||||
|
||||
has_sep = False
|
||||
|
||||
while pos < len_str and comp < 5:
|
||||
comp += 1
|
||||
|
||||
if timestr[pos:pos + 1] in b'-+Zz':
|
||||
# Detect time zone boundary
|
||||
components[-1] = self._parse_tzstr(timestr[pos:])
|
||||
pos = len_str
|
||||
break
|
||||
|
||||
if comp == 1 and timestr[pos:pos+1] == self._TIME_SEP:
|
||||
has_sep = True
|
||||
pos += 1
|
||||
elif comp == 2 and has_sep:
|
||||
if timestr[pos:pos+1] != self._TIME_SEP:
|
||||
raise ValueError('Inconsistent use of colon separator')
|
||||
pos += 1
|
||||
|
||||
if comp < 3:
|
||||
# Hour, minute, second
|
||||
components[comp] = int(timestr[pos:pos + 2])
|
||||
pos += 2
|
||||
|
||||
if comp == 3:
|
||||
# Fraction of a second
|
||||
frac = self._FRACTION_REGEX.match(timestr[pos:])
|
||||
if not frac:
|
||||
continue
|
||||
|
||||
us_str = frac.group(1)[:6] # Truncate to microseconds
|
||||
components[comp] = int(us_str) * 10**(6 - len(us_str))
|
||||
pos += len(frac.group())
|
||||
|
||||
if pos < len_str:
|
||||
raise ValueError('Unused components in ISO string')
|
||||
|
||||
if components[0] == 24:
|
||||
# Standard supports 00:00 and 24:00 as representations of midnight
|
||||
if any(component != 0 for component in components[1:4]):
|
||||
raise ValueError('Hour may only be 24 at 24:00:00.000')
|
||||
|
||||
return components
|
||||
|
||||
def _parse_tzstr(self, tzstr, zero_as_utc=True):
|
||||
if tzstr == b'Z' or tzstr == b'z':
|
||||
return tz.UTC
|
||||
|
||||
if len(tzstr) not in {3, 5, 6}:
|
||||
raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters')
|
||||
|
||||
if tzstr[0:1] == b'-':
|
||||
mult = -1
|
||||
elif tzstr[0:1] == b'+':
|
||||
mult = 1
|
||||
else:
|
||||
raise ValueError('Time zone offset requires sign')
|
||||
|
||||
hours = int(tzstr[1:3])
|
||||
if len(tzstr) == 3:
|
||||
minutes = 0
|
||||
else:
|
||||
minutes = int(tzstr[(4 if tzstr[3:4] == self._TIME_SEP else 3):])
|
||||
|
||||
if zero_as_utc and hours == 0 and minutes == 0:
|
||||
return tz.UTC
|
||||
else:
|
||||
if minutes > 59:
|
||||
raise ValueError('Invalid minutes in time zone offset')
|
||||
|
||||
if hours > 23:
|
||||
raise ValueError('Invalid hours in time zone offset')
|
||||
|
||||
return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60)
|
||||
|
||||
|
||||
DEFAULT_ISOPARSER = isoparser()
|
||||
isoparse = DEFAULT_ISOPARSER.isoparse
|
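A minimal sketch of the isoparse entry point exported at the end of the file above (assuming the vendored package is importable):

from dateutil.parser import isoparse

dt = isoparse("2018-04-09T13:37:00+02:00")
print(dt)         # 2018-04-09 13:37:00+02:00
print(dt.tzinfo)  # tzoffset(None, 7200)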
599 .local/lib/python3.8/site-packages/dateutil/relativedelta.py Normal file
@ -0,0 +1,599 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import datetime
|
||||
import calendar
|
||||
|
||||
import operator
|
||||
from math import copysign
|
||||
|
||||
from six import integer_types
|
||||
from warnings import warn
|
||||
|
||||
from ._common import weekday
|
||||
|
||||
MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7))
|
||||
|
||||
__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"]
|
||||
|
||||
|
||||
class relativedelta(object):
|
||||
"""
|
||||
The relativedelta type is designed to be applied to an existing datetime and
|
||||
can replace specific components of that datetime, or represents an interval
|
||||
of time.
|
||||
|
||||
It is based on the specification of the excellent work done by M.-A. Lemburg
|
||||
in his
|
||||
`mx.DateTime <https://www.egenix.com/products/python/mxBase/mxDateTime/>`_ extension.
|
||||
However, notice that this type does *NOT* implement the same algorithm as
|
||||
his work. Do *NOT* expect it to behave like mx.DateTime's counterpart.
|
||||
|
||||
There are two different ways to build a relativedelta instance. The
|
||||
first one is passing it two date/datetime classes::
|
||||
|
||||
relativedelta(datetime1, datetime2)
|
||||
|
||||
The second one is passing it any number of the following keyword arguments::
|
||||
|
||||
relativedelta(arg1=x,arg2=y,arg3=z...)
|
||||
|
||||
year, month, day, hour, minute, second, microsecond:
|
||||
Absolute information (argument is singular); adding or subtracting a
|
||||
relativedelta with absolute information does not perform an arithmetic
|
||||
operation, but rather REPLACES the corresponding value in the
|
||||
original datetime with the value(s) in relativedelta.
|
||||
|
||||
years, months, weeks, days, hours, minutes, seconds, microseconds:
|
||||
Relative information, may be negative (argument is plural); adding
|
||||
or subtracting a relativedelta with relative information performs
|
||||
the corresponding arithmetic operation on the original datetime value
|
||||
with the information in the relativedelta.
|
||||
|
||||
weekday:
|
||||
One of the weekday instances (MO, TU, etc) available in the
|
||||
relativedelta module. These instances may receive a parameter N,
|
||||
specifying the Nth weekday, which could be positive or negative
|
||||
(like MO(+1) or MO(-2)). Not specifying it is the same as specifying
|
||||
+1. You can also use an integer, where 0=MO. This argument is always
|
||||
relative e.g. if the calculated date is already Monday, using MO(1)
|
||||
or MO(-1) won't change the day. To effectively make it absolute, use
|
||||
it in combination with the day argument (e.g. day=1, MO(1) for first
|
||||
Monday of the month).
|
||||
|
||||
leapdays:
|
||||
Will add given days to the date found, if year is a leap
|
||||
year, and the date found is post 28 of february.
|
||||
|
||||
yearday, nlyearday:
|
||||
Set the yearday or the non-leap year day (jump leap days).
|
||||
These are converted to day/month/leapdays information.
|
||||
|
||||
There are relative and absolute forms of the keyword
|
||||
arguments. The plural is relative, and the singular is
|
||||
absolute. For each argument in the order below, the absolute form
|
||||
is applied first (by setting each attribute to that value) and
|
||||
then the relative form (by adding the value to the attribute).
|
||||
|
||||
The order of attributes considered when this relativedelta is
|
||||
added to a datetime is:
|
||||
|
||||
1. Year
|
||||
2. Month
|
||||
3. Day
|
||||
4. Hours
|
||||
5. Minutes
|
||||
6. Seconds
|
||||
7. Microseconds
|
||||
|
||||
Finally, weekday is applied, using the rule described above.
|
||||
|
||||
For example
|
||||
|
||||
>>> from datetime import datetime
|
||||
>>> from dateutil.relativedelta import relativedelta, MO
|
||||
>>> dt = datetime(2018, 4, 9, 13, 37, 0)
|
||||
>>> delta = relativedelta(hours=25, day=1, weekday=MO(1))
|
||||
>>> dt + delta
|
||||
datetime.datetime(2018, 4, 2, 14, 37)
|
||||
|
||||
First, the day is set to 1 (the first of the month), then 25 hours
|
||||
are added, to get to the 2nd day and 14th hour, finally the
|
||||
weekday is applied, but since the 2nd is already a Monday there is
|
||||
no effect.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, dt1=None, dt2=None,
|
||||
years=0, months=0, days=0, leapdays=0, weeks=0,
|
||||
hours=0, minutes=0, seconds=0, microseconds=0,
|
||||
year=None, month=None, day=None, weekday=None,
|
||||
yearday=None, nlyearday=None,
|
||||
hour=None, minute=None, second=None, microsecond=None):
|
||||
|
||||
if dt1 and dt2:
|
||||
# datetime is a subclass of date. So both must be date
|
||||
if not (isinstance(dt1, datetime.date) and
|
||||
isinstance(dt2, datetime.date)):
|
||||
raise TypeError("relativedelta only diffs datetime/date")
|
||||
|
||||
# We allow two dates, or two datetimes, so we coerce them to be
|
||||
# of the same type
|
||||
if (isinstance(dt1, datetime.datetime) !=
|
||||
isinstance(dt2, datetime.datetime)):
|
||||
if not isinstance(dt1, datetime.datetime):
|
||||
dt1 = datetime.datetime.fromordinal(dt1.toordinal())
|
||||
elif not isinstance(dt2, datetime.datetime):
|
||||
dt2 = datetime.datetime.fromordinal(dt2.toordinal())
|
||||
|
||||
self.years = 0
|
||||
self.months = 0
|
||||
self.days = 0
|
||||
self.leapdays = 0
|
||||
self.hours = 0
|
||||
self.minutes = 0
|
||||
self.seconds = 0
|
||||
self.microseconds = 0
|
||||
self.year = None
|
||||
self.month = None
|
||||
self.day = None
|
||||
self.weekday = None
|
||||
self.hour = None
|
||||
self.minute = None
|
||||
self.second = None
|
||||
self.microsecond = None
|
||||
self._has_time = 0
|
||||
|
||||
# Get year / month delta between the two
|
||||
months = (dt1.year - dt2.year) * 12 + (dt1.month - dt2.month)
|
||||
self._set_months(months)
|
||||
|
||||
# Remove the year/month delta so the timedelta is just well-defined
|
||||
# time units (seconds, days and microseconds)
|
||||
dtm = self.__radd__(dt2)
|
||||
|
||||
# If we've overshot our target, make an adjustment
|
||||
if dt1 < dt2:
|
||||
compare = operator.gt
|
||||
increment = 1
|
||||
else:
|
||||
compare = operator.lt
|
||||
increment = -1
|
||||
|
||||
while compare(dt1, dtm):
|
||||
months += increment
|
||||
self._set_months(months)
|
||||
dtm = self.__radd__(dt2)
|
||||
|
||||
# Get the timedelta between the "months-adjusted" date and dt1
|
||||
delta = dt1 - dtm
|
||||
self.seconds = delta.seconds + delta.days * 86400
|
||||
self.microseconds = delta.microseconds
|
||||
else:
|
||||
# Check for non-integer values in integer-only quantities
|
||||
if any(x is not None and x != int(x) for x in (years, months)):
|
||||
raise ValueError("Non-integer years and months are "
|
||||
"ambiguous and not currently supported.")
|
||||
|
||||
# Relative information
|
||||
self.years = int(years)
|
||||
self.months = int(months)
|
||||
self.days = days + weeks * 7
|
||||
self.leapdays = leapdays
|
||||
self.hours = hours
|
||||
self.minutes = minutes
|
||||
self.seconds = seconds
|
||||
self.microseconds = microseconds
|
||||
|
||||
# Absolute information
|
||||
self.year = year
|
||||
self.month = month
|
||||
self.day = day
|
||||
self.hour = hour
|
||||
self.minute = minute
|
||||
self.second = second
|
||||
self.microsecond = microsecond
|
||||
|
||||
if any(x is not None and int(x) != x
|
||||
for x in (year, month, day, hour,
|
||||
minute, second, microsecond)):
|
||||
# For now we'll deprecate floats - later it'll be an error.
|
||||
warn("Non-integer value passed as absolute information. " +
|
||||
"This is not a well-defined condition and will raise " +
|
||||
"errors in future versions.", DeprecationWarning)
|
||||
|
||||
if isinstance(weekday, integer_types):
|
||||
self.weekday = weekdays[weekday]
|
||||
else:
|
||||
self.weekday = weekday
|
||||
|
||||
yday = 0
|
||||
if nlyearday:
|
||||
yday = nlyearday
|
||||
elif yearday:
|
||||
yday = yearday
|
||||
if yearday > 59:
|
||||
self.leapdays = -1
|
||||
if yday:
|
||||
ydayidx = [31, 59, 90, 120, 151, 181, 212,
|
||||
243, 273, 304, 334, 366]
|
||||
for idx, ydays in enumerate(ydayidx):
|
||||
if yday <= ydays:
|
||||
self.month = idx+1
|
||||
if idx == 0:
|
||||
self.day = yday
|
||||
else:
|
||||
self.day = yday-ydayidx[idx-1]
|
||||
break
|
||||
else:
|
||||
raise ValueError("invalid year day (%d)" % yday)
|
||||
|
||||
self._fix()
|
||||
|
||||
def _fix(self):
|
||||
if abs(self.microseconds) > 999999:
|
||||
s = _sign(self.microseconds)
|
||||
div, mod = divmod(self.microseconds * s, 1000000)
|
||||
self.microseconds = mod * s
|
||||
self.seconds += div * s
|
||||
if abs(self.seconds) > 59:
|
||||
s = _sign(self.seconds)
|
||||
div, mod = divmod(self.seconds * s, 60)
|
||||
self.seconds = mod * s
|
||||
self.minutes += div * s
|
||||
if abs(self.minutes) > 59:
|
||||
s = _sign(self.minutes)
|
||||
div, mod = divmod(self.minutes * s, 60)
|
||||
self.minutes = mod * s
|
||||
self.hours += div * s
|
||||
if abs(self.hours) > 23:
|
||||
s = _sign(self.hours)
|
||||
div, mod = divmod(self.hours * s, 24)
|
||||
self.hours = mod * s
|
||||
self.days += div * s
|
||||
if abs(self.months) > 11:
|
||||
s = _sign(self.months)
|
||||
div, mod = divmod(self.months * s, 12)
|
||||
self.months = mod * s
|
||||
self.years += div * s
|
||||
if (self.hours or self.minutes or self.seconds or self.microseconds
|
||||
or self.hour is not None or self.minute is not None or
|
||||
self.second is not None or self.microsecond is not None):
|
||||
self._has_time = 1
|
||||
else:
|
||||
self._has_time = 0
|
||||
|
||||
@property
|
||||
def weeks(self):
|
||||
return int(self.days / 7.0)
|
||||
|
||||
@weeks.setter
|
||||
def weeks(self, value):
|
||||
self.days = self.days - (self.weeks * 7) + value * 7
|
||||
|
||||
def _set_months(self, months):
|
||||
self.months = months
|
||||
if abs(self.months) > 11:
|
||||
s = _sign(self.months)
|
||||
div, mod = divmod(self.months * s, 12)
|
||||
self.months = mod * s
|
||||
self.years = div * s
|
||||
else:
|
||||
self.years = 0
|
||||
|
||||
def normalized(self):
|
||||
"""
|
||||
Return a version of this object represented entirely using integer
|
||||
values for the relative attributes.
|
||||
|
||||
>>> relativedelta(days=1.5, hours=2).normalized()
|
||||
relativedelta(days=+1, hours=+14)
|
||||
|
||||
:return:
|
||||
Returns a :class:`dateutil.relativedelta.relativedelta` object.
|
||||
"""
|
||||
# Cascade remainders down (rounding each to roughly nearest microsecond)
|
||||
days = int(self.days)
|
||||
|
||||
hours_f = round(self.hours + 24 * (self.days - days), 11)
|
||||
hours = int(hours_f)
|
||||
|
||||
minutes_f = round(self.minutes + 60 * (hours_f - hours), 10)
|
||||
minutes = int(minutes_f)
|
||||
|
||||
seconds_f = round(self.seconds + 60 * (minutes_f - minutes), 8)
|
||||
seconds = int(seconds_f)
|
||||
|
||||
microseconds = round(self.microseconds + 1e6 * (seconds_f - seconds))
|
||||
|
||||
# Constructor carries overflow back up with call to _fix()
|
||||
return self.__class__(years=self.years, months=self.months,
|
||||
days=days, hours=hours, minutes=minutes,
|
||||
seconds=seconds, microseconds=microseconds,
|
||||
leapdays=self.leapdays, year=self.year,
|
||||
month=self.month, day=self.day,
|
||||
weekday=self.weekday, hour=self.hour,
|
||||
minute=self.minute, second=self.second,
|
||||
microsecond=self.microsecond)
|
||||
|
||||
def __add__(self, other):
|
||||
if isinstance(other, relativedelta):
|
||||
return self.__class__(years=other.years + self.years,
|
||||
months=other.months + self.months,
|
||||
days=other.days + self.days,
|
||||
hours=other.hours + self.hours,
|
||||
minutes=other.minutes + self.minutes,
|
||||
seconds=other.seconds + self.seconds,
|
||||
microseconds=(other.microseconds +
|
||||
self.microseconds),
|
||||
leapdays=other.leapdays or self.leapdays,
|
||||
year=(other.year if other.year is not None
|
||||
else self.year),
|
||||
month=(other.month if other.month is not None
|
||||
else self.month),
|
||||
day=(other.day if other.day is not None
|
||||
else self.day),
|
||||
weekday=(other.weekday if other.weekday is not None
|
||||
else self.weekday),
|
||||
hour=(other.hour if other.hour is not None
|
||||
else self.hour),
|
||||
minute=(other.minute if other.minute is not None
|
||||
else self.minute),
|
||||
second=(other.second if other.second is not None
|
||||
else self.second),
|
||||
microsecond=(other.microsecond if other.microsecond
|
||||
is not None else
|
||||
self.microsecond))
|
||||
if isinstance(other, datetime.timedelta):
|
||||
return self.__class__(years=self.years,
|
||||
months=self.months,
|
||||
days=self.days + other.days,
|
||||
hours=self.hours,
|
||||
minutes=self.minutes,
|
||||
seconds=self.seconds + other.seconds,
|
||||
microseconds=self.microseconds + other.microseconds,
|
||||
leapdays=self.leapdays,
|
||||
year=self.year,
|
||||
month=self.month,
|
||||
day=self.day,
|
||||
weekday=self.weekday,
|
||||
hour=self.hour,
|
||||
minute=self.minute,
|
||||
second=self.second,
|
||||
microsecond=self.microsecond)
|
||||
if not isinstance(other, datetime.date):
|
||||
return NotImplemented
|
||||
elif self._has_time and not isinstance(other, datetime.datetime):
|
||||
other = datetime.datetime.fromordinal(other.toordinal())
|
||||
year = (self.year or other.year)+self.years
|
||||
month = self.month or other.month
|
||||
if self.months:
|
||||
assert 1 <= abs(self.months) <= 12
|
||||
month += self.months
|
||||
if month > 12:
|
||||
year += 1
|
||||
month -= 12
|
||||
elif month < 1:
|
||||
year -= 1
|
||||
month += 12
|
||||
day = min(calendar.monthrange(year, month)[1],
|
||||
self.day or other.day)
|
||||
repl = {"year": year, "month": month, "day": day}
|
||||
for attr in ["hour", "minute", "second", "microsecond"]:
|
||||
value = getattr(self, attr)
|
||||
if value is not None:
|
||||
repl[attr] = value
|
||||
days = self.days
|
||||
if self.leapdays and month > 2 and calendar.isleap(year):
|
||||
days += self.leapdays
|
||||
ret = (other.replace(**repl)
|
||||
+ datetime.timedelta(days=days,
|
||||
hours=self.hours,
|
||||
minutes=self.minutes,
|
||||
seconds=self.seconds,
|
||||
microseconds=self.microseconds))
|
||||
if self.weekday:
|
||||
weekday, nth = self.weekday.weekday, self.weekday.n or 1
|
||||
jumpdays = (abs(nth) - 1) * 7
|
||||
if nth > 0:
|
||||
jumpdays += (7 - ret.weekday() + weekday) % 7
|
||||
else:
|
||||
jumpdays += (ret.weekday() - weekday) % 7
|
||||
jumpdays *= -1
|
||||
ret += datetime.timedelta(days=jumpdays)
|
||||
return ret
|
||||
|
||||
def __radd__(self, other):
|
||||
return self.__add__(other)
|
||||
|
||||
def __rsub__(self, other):
|
||||
return self.__neg__().__radd__(other)
|
||||
|
||||
def __sub__(self, other):
|
||||
if not isinstance(other, relativedelta):
|
||||
return NotImplemented # In case the other object defines __rsub__
|
||||
return self.__class__(years=self.years - other.years,
|
||||
months=self.months - other.months,
|
||||
days=self.days - other.days,
|
||||
hours=self.hours - other.hours,
|
||||
minutes=self.minutes - other.minutes,
|
||||
seconds=self.seconds - other.seconds,
|
||||
microseconds=self.microseconds - other.microseconds,
|
||||
leapdays=self.leapdays or other.leapdays,
|
||||
year=(self.year if self.year is not None
|
||||
else other.year),
|
||||
month=(self.month if self.month is not None else
|
||||
other.month),
|
||||
day=(self.day if self.day is not None else
|
||||
other.day),
|
||||
weekday=(self.weekday if self.weekday is not None else
|
||||
other.weekday),
|
||||
hour=(self.hour if self.hour is not None else
|
||||
other.hour),
|
||||
minute=(self.minute if self.minute is not None else
|
||||
other.minute),
|
||||
second=(self.second if self.second is not None else
|
||||
other.second),
|
||||
microsecond=(self.microsecond if self.microsecond
|
||||
is not None else
|
||||
other.microsecond))
|
||||
|
||||
def __abs__(self):
|
||||
return self.__class__(years=abs(self.years),
|
||||
months=abs(self.months),
|
||||
days=abs(self.days),
|
||||
hours=abs(self.hours),
|
||||
minutes=abs(self.minutes),
|
||||
seconds=abs(self.seconds),
|
||||
microseconds=abs(self.microseconds),
|
||||
leapdays=self.leapdays,
|
||||
year=self.year,
|
||||
month=self.month,
|
||||
day=self.day,
|
||||
weekday=self.weekday,
|
||||
hour=self.hour,
|
||||
minute=self.minute,
|
||||
second=self.second,
|
||||
microsecond=self.microsecond)
|
||||
|
||||
def __neg__(self):
|
||||
return self.__class__(years=-self.years,
|
||||
months=-self.months,
|
||||
days=-self.days,
|
||||
hours=-self.hours,
|
||||
minutes=-self.minutes,
|
||||
seconds=-self.seconds,
|
||||
microseconds=-self.microseconds,
|
||||
leapdays=self.leapdays,
|
||||
year=self.year,
|
||||
month=self.month,
|
||||
day=self.day,
|
||||
weekday=self.weekday,
|
||||
hour=self.hour,
|
||||
minute=self.minute,
|
||||
second=self.second,
|
||||
microsecond=self.microsecond)
|
||||
|
||||
def __bool__(self):
|
||||
return not (not self.years and
|
||||
not self.months and
|
||||
not self.days and
|
||||
not self.hours and
|
||||
not self.minutes and
|
||||
not self.seconds and
|
||||
not self.microseconds and
|
||||
not self.leapdays and
|
||||
self.year is None and
|
||||
self.month is None and
|
||||
self.day is None and
|
||||
self.weekday is None and
|
||||
self.hour is None and
|
||||
self.minute is None and
|
||||
self.second is None and
|
||||
self.microsecond is None)
|
||||
# Compatibility with Python 2.x
|
||||
__nonzero__ = __bool__
|
||||
|
||||
def __mul__(self, other):
|
||||
try:
|
||||
f = float(other)
|
||||
except TypeError:
|
||||
return NotImplemented
|
||||
|
||||
return self.__class__(years=int(self.years * f),
|
||||
months=int(self.months * f),
|
||||
days=int(self.days * f),
|
||||
hours=int(self.hours * f),
|
||||
minutes=int(self.minutes * f),
|
||||
seconds=int(self.seconds * f),
|
||||
microseconds=int(self.microseconds * f),
|
||||
leapdays=self.leapdays,
|
||||
year=self.year,
|
||||
month=self.month,
|
||||
day=self.day,
|
||||
weekday=self.weekday,
|
||||
hour=self.hour,
|
||||
minute=self.minute,
|
||||
second=self.second,
|
||||
microsecond=self.microsecond)
|
||||
|
||||
__rmul__ = __mul__
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, relativedelta):
|
||||
return NotImplemented
|
||||
if self.weekday or other.weekday:
|
||||
if not self.weekday or not other.weekday:
|
||||
return False
|
||||
if self.weekday.weekday != other.weekday.weekday:
|
||||
return False
|
||||
n1, n2 = self.weekday.n, other.weekday.n
|
||||
if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)):
|
||||
return False
|
||||
return (self.years == other.years and
|
||||
self.months == other.months and
|
||||
self.days == other.days and
|
||||
self.hours == other.hours and
|
||||
self.minutes == other.minutes and
|
||||
self.seconds == other.seconds and
|
||||
self.microseconds == other.microseconds and
|
||||
self.leapdays == other.leapdays and
|
||||
self.year == other.year and
|
||||
self.month == other.month and
|
||||
self.day == other.day and
|
||||
self.hour == other.hour and
|
||||
self.minute == other.minute and
|
||||
self.second == other.second and
|
||||
self.microsecond == other.microsecond)
|
||||
|
||||
def __hash__(self):
|
||||
return hash((
|
||||
self.weekday,
|
||||
self.years,
|
||||
self.months,
|
||||
self.days,
|
||||
self.hours,
|
||||
self.minutes,
|
||||
self.seconds,
|
||||
self.microseconds,
|
||||
self.leapdays,
|
||||
self.year,
|
||||
self.month,
|
||||
self.day,
|
||||
self.hour,
|
||||
self.minute,
|
||||
self.second,
|
||||
self.microsecond,
|
||||
))
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __div__(self, other):
|
||||
try:
|
||||
reciprocal = 1 / float(other)
|
||||
except TypeError:
|
||||
return NotImplemented
|
||||
|
||||
return self.__mul__(reciprocal)
|
||||
|
||||
__truediv__ = __div__
|
||||
|
||||
def __repr__(self):
|
||||
l = []
|
||||
for attr in ["years", "months", "days", "leapdays",
|
||||
"hours", "minutes", "seconds", "microseconds"]:
|
||||
value = getattr(self, attr)
|
||||
if value:
|
||||
l.append("{attr}={value:+g}".format(attr=attr, value=value))
|
||||
for attr in ["year", "month", "day", "weekday",
|
||||
"hour", "minute", "second", "microsecond"]:
|
||||
value = getattr(self, attr)
|
||||
if value is not None:
|
||||
l.append("{attr}={value}".format(attr=attr, value=repr(value)))
|
||||
return "{classname}({attrs})".format(classname=self.__class__.__name__,
|
||||
attrs=", ".join(l))
|
||||
|
||||
|
||||
def _sign(x):
|
||||
return int(copysign(1, x))
|
||||
|
||||
# vim:ts=4:sw=4:et
|
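The relativedelta class docstring above already carries a doctest; the same example as a runnable sketch:

from datetime import datetime
from dateutil.relativedelta import relativedelta, MO

dt = datetime(2018, 4, 9, 13, 37, 0)
print(dt + relativedelta(hours=25, day=1, weekday=MO(1)))  # 2018-04-02 14:37:00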
1737 .local/lib/python3.8/site-packages/dateutil/rrule.py Normal file
File diff suppressed because it is too large
12 .local/lib/python3.8/site-packages/dateutil/tz/__init__.py Normal file
@@ -0,0 +1,12 @@
# -*- coding: utf-8 -*-
from .tz import *
from .tz import __doc__

__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange",
           "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz",
           "enfold", "datetime_ambiguous", "datetime_exists",
           "resolve_imaginary", "UTC", "DeprecatedTzFormatWarning"]


class DeprecatedTzFormatWarning(Warning):
    """Warning raised when time zones are parsed from deprecated formats."""
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
419 .local/lib/python3.8/site-packages/dateutil/tz/_common.py Normal file
@ -0,0 +1,419 @@
|
||||
from six import PY2
|
||||
|
||||
from functools import wraps
|
||||
|
||||
from datetime import datetime, timedelta, tzinfo
|
||||
|
||||
|
||||
ZERO = timedelta(0)
|
||||
|
||||
__all__ = ['tzname_in_python2', 'enfold']
|
||||
|
||||
|
||||
def tzname_in_python2(namefunc):
|
||||
"""Change unicode output into bytestrings in Python 2
|
||||
|
||||
tzname() API changed in Python 3. It used to return bytes, but was changed
|
||||
to unicode strings
|
||||
"""
|
||||
if PY2:
|
||||
@wraps(namefunc)
|
||||
def adjust_encoding(*args, **kwargs):
|
||||
name = namefunc(*args, **kwargs)
|
||||
if name is not None:
|
||||
name = name.encode()
|
||||
|
||||
return name
|
||||
|
||||
return adjust_encoding
|
||||
else:
|
||||
return namefunc
|
||||
|
||||
|
||||
# The following is adapted from Alexander Belopolsky's tz library
|
||||
# https://github.com/abalkin/tz
|
||||
if hasattr(datetime, 'fold'):
|
||||
# This is the pre-python 3.6 fold situation
|
||||
def enfold(dt, fold=1):
|
||||
"""
|
||||
Provides a unified interface for assigning the ``fold`` attribute to
|
||||
datetimes both before and after the implementation of PEP-495.
|
||||
|
||||
:param fold:
|
||||
The value for the ``fold`` attribute in the returned datetime. This
|
||||
should be either 0 or 1.
|
||||
|
||||
:return:
|
||||
Returns an object for which ``getattr(dt, 'fold', 0)`` returns
|
||||
``fold`` for all versions of Python. In versions prior to
|
||||
Python 3.6, this is a ``_DatetimeWithFold`` object, which is a
|
||||
subclass of :py:class:`datetime.datetime` with the ``fold``
|
||||
attribute added, if ``fold`` is 1.
|
||||
|
||||
.. versionadded:: 2.6.0
|
||||
"""
|
||||
return dt.replace(fold=fold)
|
||||
|
||||
else:
|
||||
class _DatetimeWithFold(datetime):
|
||||
"""
|
||||
This is a class designed to provide a PEP 495-compliant interface for
|
||||
Python versions before 3.6. It is used only for dates in a fold, so
|
||||
the ``fold`` attribute is fixed at ``1``.
|
||||
|
||||
.. versionadded:: 2.6.0
|
||||
"""
|
||||
__slots__ = ()
|
||||
|
||||
def replace(self, *args, **kwargs):
|
||||
"""
|
||||
Return a datetime with the same attributes, except for those
|
||||
attributes given new values by whichever keyword arguments are
|
||||
specified. Note that tzinfo=None can be specified to create a naive
|
||||
datetime from an aware datetime with no conversion of date and time
|
||||
data.
|
||||
|
||||
This is reimplemented in ``_DatetimeWithFold`` because pypy3 will
|
||||
return a ``datetime.datetime`` even if ``fold`` is unchanged.
|
||||
"""
|
||||
argnames = (
|
||||
'year', 'month', 'day', 'hour', 'minute', 'second',
|
||||
'microsecond', 'tzinfo'
|
||||
)
|
||||
|
||||
for arg, argname in zip(args, argnames):
|
||||
if argname in kwargs:
|
||||
raise TypeError('Duplicate argument: {}'.format(argname))
|
||||
|
||||
kwargs[argname] = arg
|
||||
|
||||
for argname in argnames:
|
||||
if argname not in kwargs:
|
||||
kwargs[argname] = getattr(self, argname)
|
||||
|
||||
dt_class = self.__class__ if kwargs.get('fold', 1) else datetime
|
||||
|
||||
return dt_class(**kwargs)
|
||||
|
||||
@property
|
||||
def fold(self):
|
||||
return 1
|
||||
|
||||
def enfold(dt, fold=1):
|
||||
"""
|
||||
Provides a unified interface for assigning the ``fold`` attribute to
|
||||
datetimes both before and after the implementation of PEP-495.
|
||||
|
||||
:param fold:
|
||||
The value for the ``fold`` attribute in the returned datetime. This
|
||||
should be either 0 or 1.
|
||||
|
||||
:return:
|
||||
Returns an object for which ``getattr(dt, 'fold', 0)`` returns
|
||||
``fold`` for all versions of Python. In versions prior to
|
||||
Python 3.6, this is a ``_DatetimeWithFold`` object, which is a
|
||||
subclass of :py:class:`datetime.datetime` with the ``fold``
|
||||
attribute added, if ``fold`` is 1.
|
||||
|
||||
.. versionadded:: 2.6.0
|
||||
"""
|
||||
if getattr(dt, 'fold', 0) == fold:
|
||||
return dt
|
||||
|
||||
args = dt.timetuple()[:6]
|
||||
args += (dt.microsecond, dt.tzinfo)
|
||||
|
||||
if fold:
|
||||
return _DatetimeWithFold(*args)
|
||||
else:
|
||||
return datetime(*args)
|
||||
|
||||
|
||||
def _validate_fromutc_inputs(f):
|
||||
"""
|
||||
The CPython version of ``fromutc`` checks that the input is a ``datetime``
|
||||
object and that ``self`` is attached as its ``tzinfo``.
|
||||
"""
|
||||
@wraps(f)
|
||||
def fromutc(self, dt):
|
||||
if not isinstance(dt, datetime):
|
||||
raise TypeError("fromutc() requires a datetime argument")
|
||||
if dt.tzinfo is not self:
|
||||
raise ValueError("dt.tzinfo is not self")
|
||||
|
||||
return f(self, dt)
|
||||
|
||||
return fromutc
|
||||
|
||||
|
||||
class _tzinfo(tzinfo):
|
||||
"""
|
||||
Base class for all ``dateutil`` ``tzinfo`` objects.
|
||||
"""
|
||||
|
||||
def is_ambiguous(self, dt):
|
||||
"""
|
||||
Whether or not the "wall time" of a given datetime is ambiguous in this
|
||||
zone.
|
||||
|
||||
:param dt:
|
||||
A :py:class:`datetime.datetime`, naive or time zone aware.
|
||||
|
||||
|
||||
:return:
|
||||
Returns ``True`` if ambiguous, ``False`` otherwise.
|
||||
|
||||
.. versionadded:: 2.6.0
|
||||
"""
|
||||
|
||||
dt = dt.replace(tzinfo=self)
|
||||
|
||||
wall_0 = enfold(dt, fold=0)
|
||||
wall_1 = enfold(dt, fold=1)
|
||||
|
||||
same_offset = wall_0.utcoffset() == wall_1.utcoffset()
|
||||
same_dt = wall_0.replace(tzinfo=None) == wall_1.replace(tzinfo=None)
|
||||
|
||||
return same_dt and not same_offset
|
||||
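# Editor's note: a minimal sketch of how this method behaves (assumed zone
# name; not part of the original file).
#
#     from datetime import datetime
#     from dateutil import tz
#
#     nyc = tz.gettz('America/New_York')
#     nyc.is_ambiguous(datetime(2020, 11, 1, 1, 30))   # True  (repeated hour)
#     nyc.is_ambiguous(datetime(2020, 11, 1, 3, 30))   # False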
|
||||
def _fold_status(self, dt_utc, dt_wall):
|
||||
"""
|
||||
Determine the fold status of a "wall" datetime, given a representation
|
||||
of the same datetime as a (naive) UTC datetime. This is calculated based
|
||||
on the assumption that ``dt.utcoffset() - dt.dst()`` is constant for all
|
||||
datetimes, and that this offset is the actual number of hours separating
|
||||
``dt_utc`` and ``dt_wall``.
|
||||
|
||||
:param dt_utc:
|
||||
Representation of the datetime as UTC
|
||||
|
||||
:param dt_wall:
|
||||
Representation of the datetime as "wall time". This parameter must
|
||||
either have a `fold` attribute or have a fold-naive
|
||||
:class:`datetime.tzinfo` attached, otherwise the calculation may
|
||||
fail.
|
||||
"""
|
||||
if self.is_ambiguous(dt_wall):
|
||||
delta_wall = dt_wall - dt_utc
|
||||
_fold = int(delta_wall == (dt_utc.utcoffset() - dt_utc.dst()))
|
||||
else:
|
||||
_fold = 0
|
||||
|
||||
return _fold
|
||||
|
||||
def _fold(self, dt):
|
||||
return getattr(dt, 'fold', 0)
|
||||
|
||||
def _fromutc(self, dt):
|
||||
"""
|
||||
Given a timezone-aware datetime in a given timezone, calculates a
|
||||
timezone-aware datetime in a new timezone.
|
||||
|
||||
Since this is the one time that we *know* we have an unambiguous
|
||||
datetime object, we take this opportunity to determine whether the
|
||||
datetime is ambiguous and in a "fold" state (e.g. if it's the second
|
||||
occurrence, chronologically, of the ambiguous datetime).
|
||||
|
||||
:param dt:
|
||||
A timezone-aware :class:`datetime.datetime` object.
|
||||
"""
|
||||
|
||||
# Re-implement the algorithm from Python's datetime.py
|
||||
dtoff = dt.utcoffset()
|
||||
if dtoff is None:
|
||||
raise ValueError("fromutc() requires a non-None utcoffset() "
|
||||
"result")
|
||||
|
||||
# The original datetime.py code assumes that `dst()` defaults to
|
||||
# zero during ambiguous times. PEP 495 inverts this presumption, so
|
||||
# for pre-PEP 495 versions of python, we need to tweak the algorithm.
|
||||
dtdst = dt.dst()
|
||||
if dtdst is None:
|
||||
raise ValueError("fromutc() requires a non-None dst() result")
|
||||
delta = dtoff - dtdst
|
||||
|
||||
dt += delta
|
||||
# Set fold=1 so we can default to being in the fold for
|
||||
# ambiguous dates.
|
||||
dtdst = enfold(dt, fold=1).dst()
|
||||
if dtdst is None:
|
||||
raise ValueError("fromutc(): dt.dst gave inconsistent "
|
||||
"results; cannot convert")
|
||||
return dt + dtdst
|
||||
|
||||
@_validate_fromutc_inputs
|
||||
def fromutc(self, dt):
|
||||
"""
|
||||
Given a timezone-aware datetime in a given timezone, calculates a
|
||||
timezone-aware datetime in a new timezone.
|
||||
|
||||
Since this is the one time that we *know* we have an unambiguous
|
||||
datetime object, we take this opportunity to determine whether the
|
||||
datetime is ambiguous and in a "fold" state (e.g. if it's the second
|
||||
occurrence, chronologically, of the ambiguous datetime).
|
||||
|
||||
:param dt:
|
||||
A timezone-aware :class:`datetime.datetime` object.
|
||||
"""
|
||||
dt_wall = self._fromutc(dt)
|
||||
|
||||
# Calculate the fold status given the two datetimes.
|
||||
_fold = self._fold_status(dt, dt_wall)
|
||||
|
||||
# Set the default fold value for ambiguous dates
|
||||
return enfold(dt_wall, fold=_fold)
|
||||
|
||||
|
||||
class tzrangebase(_tzinfo):
|
||||
"""
|
||||
This is an abstract base class for time zones represented by an annual
|
||||
transition into and out of DST. Child classes should implement the following
|
||||
methods:
|
||||
|
||||
* ``__init__(self, *args, **kwargs)``
|
||||
* ``transitions(self, year)`` - this is expected to return a tuple of
|
||||
datetimes representing the DST on and off transitions in standard
|
||||
time.
|
||||
|
||||
A fully initialized ``tzrangebase`` subclass should also provide the
|
||||
following attributes:
|
||||
* ``hasdst``: Boolean whether or not the zone uses DST.
|
||||
* ``_dst_offset`` / ``_std_offset``: :class:`datetime.timedelta` objects
|
||||
representing the respective UTC offsets.
|
||||
* ``_dst_abbr`` / ``_std_abbr``: Strings representing the timezone short
|
||||
abbreviations in DST and STD, respectively.
|
||||
* ``_hasdst``: Whether or not the zone has DST.
|
||||
|
||||
.. versionadded:: 2.6.0
|
||||
"""
|
||||
def __init__(self):
|
||||
raise NotImplementedError('tzrangebase is an abstract base class')
|
||||
|
||||
def utcoffset(self, dt):
|
||||
isdst = self._isdst(dt)
|
||||
|
||||
if isdst is None:
|
||||
return None
|
||||
elif isdst:
|
||||
return self._dst_offset
|
||||
else:
|
||||
return self._std_offset
|
||||
|
||||
def dst(self, dt):
|
||||
isdst = self._isdst(dt)
|
||||
|
||||
if isdst is None:
|
||||
return None
|
||||
elif isdst:
|
||||
return self._dst_base_offset
|
||||
else:
|
||||
return ZERO
|
||||
|
||||
@tzname_in_python2
|
||||
def tzname(self, dt):
|
||||
if self._isdst(dt):
|
||||
return self._dst_abbr
|
||||
else:
|
||||
return self._std_abbr
|
||||
|
||||
def fromutc(self, dt):
|
||||
""" Given a datetime in UTC, return local time """
|
||||
if not isinstance(dt, datetime):
|
||||
raise TypeError("fromutc() requires a datetime argument")
|
||||
|
||||
if dt.tzinfo is not self:
|
||||
raise ValueError("dt.tzinfo is not self")
|
||||
|
||||
# Get transitions - if there are none, fixed offset
|
||||
transitions = self.transitions(dt.year)
|
||||
if transitions is None:
|
||||
return dt + self.utcoffset(dt)
|
||||
|
||||
# Get the transition times in UTC
|
||||
dston, dstoff = transitions
|
||||
|
||||
dston -= self._std_offset
|
||||
dstoff -= self._std_offset
|
||||
|
||||
utc_transitions = (dston, dstoff)
|
||||
dt_utc = dt.replace(tzinfo=None)
|
||||
|
||||
isdst = self._naive_isdst(dt_utc, utc_transitions)
|
||||
|
||||
if isdst:
|
||||
dt_wall = dt + self._dst_offset
|
||||
else:
|
||||
dt_wall = dt + self._std_offset
|
||||
|
||||
_fold = int(not isdst and self.is_ambiguous(dt_wall))
|
||||
|
||||
return enfold(dt_wall, fold=_fold)
|
||||
|
||||
def is_ambiguous(self, dt):
|
||||
"""
|
||||
Whether or not the "wall time" of a given datetime is ambiguous in this
|
||||
zone.
|
||||
|
||||
:param dt:
|
||||
A :py:class:`datetime.datetime`, naive or time zone aware.
|
||||
|
||||
|
||||
:return:
|
||||
Returns ``True`` if ambiguous, ``False`` otherwise.
|
||||
|
||||
.. versionadded:: 2.6.0
|
||||
"""
|
||||
if not self.hasdst:
|
||||
return False
|
||||
|
||||
start, end = self.transitions(dt.year)
|
||||
|
||||
dt = dt.replace(tzinfo=None)
|
||||
return (end <= dt < end + self._dst_base_offset)
|
||||
|
||||
def _isdst(self, dt):
|
||||
if not self.hasdst:
|
||||
return False
|
||||
elif dt is None:
|
||||
return None
|
||||
|
||||
transitions = self.transitions(dt.year)
|
||||
|
||||
if transitions is None:
|
||||
return False
|
||||
|
||||
dt = dt.replace(tzinfo=None)
|
||||
|
||||
isdst = self._naive_isdst(dt, transitions)
|
||||
|
||||
# Handle ambiguous dates
|
||||
if not isdst and self.is_ambiguous(dt):
|
||||
return not self._fold(dt)
|
||||
else:
|
||||
return isdst
|
||||
|
||||
def _naive_isdst(self, dt, transitions):
|
||||
dston, dstoff = transitions
|
||||
|
||||
dt = dt.replace(tzinfo=None)
|
||||
|
||||
if dston < dstoff:
|
||||
isdst = dston <= dt < dstoff
|
||||
else:
|
||||
isdst = not dstoff <= dt < dston
|
||||
|
||||
return isdst
|
||||
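# Editor's note (sketch, not part of the original file): the branch above
# covers both hemispheres. With southern-hemisphere rules the DST interval
# wraps the new year, so a January date counts as DST:
#
#     from datetime import datetime
#     dston = datetime(2020, 10, 4, 2, 0)   # DST starts late in the year
#     dstoff = datetime(2020, 4, 5, 3, 0)   # and ends early in the year
#     dt = datetime(2020, 1, 15, 12, 0)
#     assert not (dstoff <= dt < dston)     # hence dt is treated as DST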
|
||||
@property
|
||||
def _dst_base_offset(self):
|
||||
return self._dst_offset - self._std_offset
|
||||
|
||||
__hash__ = None
|
||||
|
||||
def __ne__(self, other):
|
||||
return not (self == other)
|
||||
|
||||
def __repr__(self):
|
||||
return "%s(...)" % self.__class__.__name__
|
||||
|
||||
__reduce__ = object.__reduce__
|
80
.local/lib/python3.8/site-packages/dateutil/tz/_factories.py
Normal file
80
.local/lib/python3.8/site-packages/dateutil/tz/_factories.py
Normal file
@ -0,0 +1,80 @@
|
||||
from datetime import timedelta
|
||||
import weakref
|
||||
from collections import OrderedDict
|
||||
|
||||
from six.moves import _thread
|
||||
|
||||
|
||||
class _TzSingleton(type):
|
||||
def __init__(cls, *args, **kwargs):
|
||||
cls.__instance = None
|
||||
super(_TzSingleton, cls).__init__(*args, **kwargs)
|
||||
|
||||
def __call__(cls):
|
||||
if cls.__instance is None:
|
||||
cls.__instance = super(_TzSingleton, cls).__call__()
|
||||
return cls.__instance
|
||||
|
||||
|
||||
class _TzFactory(type):
|
||||
def instance(cls, *args, **kwargs):
|
||||
"""Alternate constructor that returns a fresh instance"""
|
||||
return type.__call__(cls, *args, **kwargs)
|
||||
|
||||
|
||||
class _TzOffsetFactory(_TzFactory):
|
||||
def __init__(cls, *args, **kwargs):
|
||||
cls.__instances = weakref.WeakValueDictionary()
|
||||
cls.__strong_cache = OrderedDict()
|
||||
cls.__strong_cache_size = 8
|
||||
|
||||
cls._cache_lock = _thread.allocate_lock()
|
||||
|
||||
def __call__(cls, name, offset):
|
||||
if isinstance(offset, timedelta):
|
||||
key = (name, offset.total_seconds())
|
||||
else:
|
||||
key = (name, offset)
|
||||
|
||||
instance = cls.__instances.get(key, None)
|
||||
if instance is None:
|
||||
instance = cls.__instances.setdefault(key,
|
||||
cls.instance(name, offset))
|
||||
|
||||
# This lock may not be necessary in Python 3. See GH issue #901
|
||||
with cls._cache_lock:
|
||||
cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance)
|
||||
|
||||
# Remove an item if the strong cache is overpopulated
|
||||
if len(cls.__strong_cache) > cls.__strong_cache_size:
|
||||
cls.__strong_cache.popitem(last=False)
|
||||
|
||||
return instance
|
||||
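# Editor's note: a small sketch of the intended caching behaviour (assumed
# usage; not part of the original file). Equal (name, offset) arguments yield
# the same cached instance, while ``.instance()`` bypasses the cache:
#
#     from dateutil import tz
#
#     a = tz.tzoffset('BRST', -10800)
#     b = tz.tzoffset('BRST', -10800)
#     assert a is b                          # served from the caches above
#     c = tz.tzoffset.instance('BRST', -10800)
#     assert c is not a                      # a fresh, uncached instance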
|
||||
|
||||
class _TzStrFactory(_TzFactory):
|
||||
def __init__(cls, *args, **kwargs):
|
||||
cls.__instances = weakref.WeakValueDictionary()
|
||||
cls.__strong_cache = OrderedDict()
|
||||
cls.__strong_cache_size = 8
|
||||
|
||||
cls.__cache_lock = _thread.allocate_lock()
|
||||
|
||||
def __call__(cls, s, posix_offset=False):
|
||||
key = (s, posix_offset)
|
||||
instance = cls.__instances.get(key, None)
|
||||
|
||||
if instance is None:
|
||||
instance = cls.__instances.setdefault(key,
|
||||
cls.instance(s, posix_offset))
|
||||
|
||||
# This lock may not be necessary in Python 3. See GH issue #901
|
||||
with cls.__cache_lock:
|
||||
cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance)
|
||||
|
||||
# Remove an item if the strong cache is overpopulated
|
||||
if len(cls.__strong_cache) > cls.__strong_cache_size:
|
||||
cls.__strong_cache.popitem(last=False)
|
||||
|
||||
return instance
|
||||
|
1849
.local/lib/python3.8/site-packages/dateutil/tz/tz.py
Normal file
1849
.local/lib/python3.8/site-packages/dateutil/tz/tz.py
Normal file
File diff suppressed because it is too large
370
.local/lib/python3.8/site-packages/dateutil/tz/win.py
Normal file
370
.local/lib/python3.8/site-packages/dateutil/tz/win.py
Normal file
@ -0,0 +1,370 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
This module provides an interface to the native time zone data on Windows,
|
||||
including :py:class:`datetime.tzinfo` implementations.
|
||||
|
||||
Attempting to import this module on a non-Windows platform will raise an
|
||||
:py:obj:`ImportError`.
|
||||
"""
|
||||
# This code was originally contributed by Jeffrey Harris.
|
||||
import datetime
|
||||
import struct
|
||||
|
||||
from six.moves import winreg
|
||||
from six import text_type
|
||||
|
||||
try:
|
||||
import ctypes
|
||||
from ctypes import wintypes
|
||||
except ValueError:
|
||||
# ValueError is raised on non-Windows systems for some horrible reason.
|
||||
raise ImportError("Running tzwin on non-Windows system")
|
||||
|
||||
from ._common import tzrangebase
|
||||
|
||||
__all__ = ["tzwin", "tzwinlocal", "tzres"]
|
||||
|
||||
ONEWEEK = datetime.timedelta(7)
|
||||
|
||||
TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones"
|
||||
TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones"
|
||||
TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation"
|
||||
|
||||
|
||||
def _settzkeyname():
|
||||
handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
|
||||
try:
|
||||
winreg.OpenKey(handle, TZKEYNAMENT).Close()
|
||||
TZKEYNAME = TZKEYNAMENT
|
||||
except WindowsError:
|
||||
TZKEYNAME = TZKEYNAME9X
|
||||
handle.Close()
|
||||
return TZKEYNAME
|
||||
|
||||
|
||||
TZKEYNAME = _settzkeyname()
|
||||
|
||||
|
||||
class tzres(object):
|
||||
"""
|
||||
Class for accessing ``tzres.dll``, which contains timezone name related
|
||||
resources.
|
||||
|
||||
.. versionadded:: 2.5.0
|
||||
"""
|
||||
p_wchar = ctypes.POINTER(wintypes.WCHAR) # Pointer to a wide char
|
||||
|
||||
def __init__(self, tzres_loc='tzres.dll'):
|
||||
# Load the user32 DLL so we can load strings from tzres
|
||||
user32 = ctypes.WinDLL('user32')
|
||||
|
||||
# Specify the LoadStringW function
|
||||
user32.LoadStringW.argtypes = (wintypes.HINSTANCE,
|
||||
wintypes.UINT,
|
||||
wintypes.LPWSTR,
|
||||
ctypes.c_int)
|
||||
|
||||
self.LoadStringW = user32.LoadStringW
|
||||
self._tzres = ctypes.WinDLL(tzres_loc)
|
||||
self.tzres_loc = tzres_loc
|
||||
|
||||
def load_name(self, offset):
|
||||
"""
|
||||
Load a timezone name from a DLL offset (integer).
|
||||
|
||||
>>> from dateutil.tzwin import tzres
|
||||
>>> tzr = tzres()
|
||||
>>> print(tzr.load_name(112))
|
||||
Eastern Standard Time
|
||||
|
||||
:param offset:
|
||||
A positive integer value referring to a string from the tzres dll.
|
||||
|
||||
.. note::
|
||||
|
||||
Offsets found in the registry are generally of the form
|
||||
``@tzres.dll,-114``. The offset in this case is 114, not -114.
|
||||
|
||||
"""
|
||||
resource = self.p_wchar()
|
||||
lpBuffer = ctypes.cast(ctypes.byref(resource), wintypes.LPWSTR)
|
||||
nchar = self.LoadStringW(self._tzres._handle, offset, lpBuffer, 0)
|
||||
return resource[:nchar]
|
||||
|
||||
def name_from_string(self, tzname_str):
|
||||
"""
|
||||
Parse strings as returned from the Windows registry into the time zone
|
||||
name as defined in the registry.
|
||||
|
||||
>>> from dateutil.tzwin import tzres
|
||||
>>> tzr = tzres()
|
||||
>>> print(tzr.name_from_string('@tzres.dll,-251'))
|
||||
Dateline Daylight Time
|
||||
>>> print(tzr.name_from_string('Eastern Standard Time'))
|
||||
Eastern Standard Time
|
||||
|
||||
:param tzname_str:
|
||||
A timezone name string as returned from a Windows registry key.
|
||||
|
||||
:return:
|
||||
Returns the localized timezone string from tzres.dll if the string
|
||||
is of the form `@tzres.dll,-offset`, else returns the input string.
|
||||
"""
|
||||
if not tzname_str.startswith('@'):
|
||||
return tzname_str
|
||||
|
||||
name_splt = tzname_str.split(',-')
|
||||
try:
|
||||
offset = int(name_splt[1])
|
||||
except (IndexError, ValueError):
|
||||
raise ValueError("Malformed timezone string.")
|
||||
|
||||
return self.load_name(offset)
|
||||
|
||||
|
||||
class tzwinbase(tzrangebase):
|
||||
"""tzinfo class based on win32's timezones available in the registry."""
|
||||
def __init__(self):
|
||||
raise NotImplementedError('tzwinbase is an abstract base class')
|
||||
|
||||
def __eq__(self, other):
|
||||
# Compare on all relevant dimensions, including name.
|
||||
if not isinstance(other, tzwinbase):
|
||||
return NotImplemented
|
||||
|
||||
return (self._std_offset == other._std_offset and
|
||||
self._dst_offset == other._dst_offset and
|
||||
self._stddayofweek == other._stddayofweek and
|
||||
self._dstdayofweek == other._dstdayofweek and
|
||||
self._stdweeknumber == other._stdweeknumber and
|
||||
self._dstweeknumber == other._dstweeknumber and
|
||||
self._stdhour == other._stdhour and
|
||||
self._dsthour == other._dsthour and
|
||||
self._stdminute == other._stdminute and
|
||||
self._dstminute == other._dstminute and
|
||||
self._std_abbr == other._std_abbr and
|
||||
self._dst_abbr == other._dst_abbr)
|
||||
|
||||
@staticmethod
|
||||
def list():
|
||||
"""Return a list of all time zones known to the system."""
|
||||
with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
|
||||
with winreg.OpenKey(handle, TZKEYNAME) as tzkey:
|
||||
result = [winreg.EnumKey(tzkey, i)
|
||||
for i in range(winreg.QueryInfoKey(tzkey)[0])]
|
||||
return result
|
||||
|
||||
def display(self):
|
||||
"""
|
||||
Return the display name of the time zone.
|
||||
"""
|
||||
return self._display
|
||||
|
||||
def transitions(self, year):
|
||||
"""
|
||||
For a given year, get the DST on and off transition times, expressed
|
||||
always on the standard time side. For zones with no transitions, this
|
||||
function returns ``None``.
|
||||
|
||||
:param year:
|
||||
The year whose transitions you would like to query.
|
||||
|
||||
:return:
|
||||
Returns a :class:`tuple` of :class:`datetime.datetime` objects,
|
||||
``(dston, dstoff)`` for zones with an annual DST transition, or
|
||||
``None`` for fixed offset zones.
|
||||
"""
|
||||
|
||||
if not self.hasdst:
|
||||
return None
|
||||
|
||||
dston = picknthweekday(year, self._dstmonth, self._dstdayofweek,
|
||||
self._dsthour, self._dstminute,
|
||||
self._dstweeknumber)
|
||||
|
||||
dstoff = picknthweekday(year, self._stdmonth, self._stddayofweek,
|
||||
self._stdhour, self._stdminute,
|
||||
self._stdweeknumber)
|
||||
|
||||
# Ambiguous dates default to the STD side
|
||||
dstoff -= self._dst_base_offset
|
||||
|
||||
return dston, dstoff
|
||||
|
||||
def _get_hasdst(self):
|
||||
return self._dstmonth != 0
|
||||
|
||||
@property
|
||||
def _dst_base_offset(self):
|
||||
return self._dst_base_offset_
|
||||
|
||||
|
||||
class tzwin(tzwinbase):
|
||||
"""
|
||||
Time zone object created from the zone info in the Windows registry
|
||||
|
||||
These are similar to :py:class:`dateutil.tz.tzrange` objects in that
|
||||
the time zone data is provided in the format of a single offset rule
|
||||
for either 0 or 2 time zone transitions per year.
|
||||
|
||||
:param: name
|
||||
The name of a Windows time zone key, e.g. "Eastern Standard Time".
|
||||
The full list of keys can be retrieved with :func:`tzwin.list`.
|
||||
"""
|
||||
|
||||
def __init__(self, name):
|
||||
self._name = name
|
||||
|
||||
with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
|
||||
tzkeyname = text_type("{kn}\\{name}").format(kn=TZKEYNAME, name=name)
|
||||
with winreg.OpenKey(handle, tzkeyname) as tzkey:
|
||||
keydict = valuestodict(tzkey)
|
||||
|
||||
self._std_abbr = keydict["Std"]
|
||||
self._dst_abbr = keydict["Dlt"]
|
||||
|
||||
self._display = keydict["Display"]
|
||||
|
||||
# See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm
|
||||
tup = struct.unpack("=3l16h", keydict["TZI"])
|
||||
stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1
|
||||
dstoffset = stdoffset-tup[2] # + DaylightBias * -1
|
||||
self._std_offset = datetime.timedelta(minutes=stdoffset)
|
||||
self._dst_offset = datetime.timedelta(minutes=dstoffset)
|
||||
|
||||
# for the meaning see the win32 TIME_ZONE_INFORMATION structure docs
|
||||
# http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx
|
||||
(self._stdmonth,
|
||||
self._stddayofweek, # Sunday = 0
|
||||
self._stdweeknumber, # Last = 5
|
||||
self._stdhour,
|
||||
self._stdminute) = tup[4:9]
|
||||
|
||||
(self._dstmonth,
|
||||
self._dstdayofweek, # Sunday = 0
|
||||
self._dstweeknumber, # Last = 5
|
||||
self._dsthour,
|
||||
self._dstminute) = tup[12:17]
|
||||
|
||||
self._dst_base_offset_ = self._dst_offset - self._std_offset
|
||||
self.hasdst = self._get_hasdst()
|
||||
|
||||
def __repr__(self):
|
||||
return "tzwin(%s)" % repr(self._name)
|
||||
|
||||
def __reduce__(self):
|
||||
return (self.__class__, (self._name,))
|
||||
|
||||
|
||||
class tzwinlocal(tzwinbase):
|
||||
"""
|
||||
Class representing the local time zone information in the Windows registry
|
||||
|
||||
While :class:`dateutil.tz.tzlocal` makes system calls (via the :mod:`time`
|
||||
module) to retrieve time zone information, ``tzwinlocal`` retrieves the
|
||||
rules directly from the Windows registry and creates an object like
|
||||
:class:`dateutil.tz.tzwin`.
|
||||
|
||||
Because Windows does not have an equivalent of :func:`time.tzset`, on
|
||||
Windows, :class:`dateutil.tz.tzlocal` instances will always reflect the
|
||||
time zone settings *at the time that the process was started*, meaning
|
||||
changes to the machine's time zone settings during the run of a program
|
||||
on Windows will **not** be reflected by :class:`dateutil.tz.tzlocal`.
|
||||
Because ``tzwinlocal`` reads the registry directly, it is unaffected by
|
||||
this issue.
|
||||
"""
|
||||
def __init__(self):
|
||||
with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
|
||||
with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey:
|
||||
keydict = valuestodict(tzlocalkey)
|
||||
|
||||
self._std_abbr = keydict["StandardName"]
|
||||
self._dst_abbr = keydict["DaylightName"]
|
||||
|
||||
try:
|
||||
tzkeyname = text_type('{kn}\\{sn}').format(kn=TZKEYNAME,
|
||||
sn=self._std_abbr)
|
||||
with winreg.OpenKey(handle, tzkeyname) as tzkey:
|
||||
_keydict = valuestodict(tzkey)
|
||||
self._display = _keydict["Display"]
|
||||
except OSError:
|
||||
self._display = None
|
||||
|
||||
stdoffset = -keydict["Bias"]-keydict["StandardBias"]
|
||||
dstoffset = stdoffset-keydict["DaylightBias"]
|
||||
|
||||
self._std_offset = datetime.timedelta(minutes=stdoffset)
|
||||
self._dst_offset = datetime.timedelta(minutes=dstoffset)
|
||||
|
||||
# For reasons unclear, in this particular key, the day of week has been
|
||||
# moved to the END of the SYSTEMTIME structure.
|
||||
tup = struct.unpack("=8h", keydict["StandardStart"])
|
||||
|
||||
(self._stdmonth,
|
||||
self._stdweeknumber, # Last = 5
|
||||
self._stdhour,
|
||||
self._stdminute) = tup[1:5]
|
||||
|
||||
self._stddayofweek = tup[7]
|
||||
|
||||
tup = struct.unpack("=8h", keydict["DaylightStart"])
|
||||
|
||||
(self._dstmonth,
|
||||
self._dstweeknumber, # Last = 5
|
||||
self._dsthour,
|
||||
self._dstminute) = tup[1:5]
|
||||
|
||||
self._dstdayofweek = tup[7]
|
||||
|
||||
self._dst_base_offset_ = self._dst_offset - self._std_offset
|
||||
self.hasdst = self._get_hasdst()
|
||||
|
||||
def __repr__(self):
|
||||
return "tzwinlocal()"
|
||||
|
||||
def __str__(self):
|
||||
# str will return the standard name, not the daylight name.
|
||||
return "tzwinlocal(%s)" % repr(self._std_abbr)
|
||||
|
||||
def __reduce__(self):
|
||||
return (self.__class__, ())
|
||||
|
||||
|
||||
def picknthweekday(year, month, dayofweek, hour, minute, whichweek):
|
||||
""" dayofweek == 0 means Sunday, whichweek 5 means last instance """
|
||||
first = datetime.datetime(year, month, 1, hour, minute)
|
||||
|
||||
# This will work if dayofweek is ISO weekday (1-7) or Microsoft-style (0-6),
|
||||
# because 7 % 7 == 0
|
||||
weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1)
|
||||
wd = weekdayone + ((whichweek - 1) * ONEWEEK)
|
||||
if (wd.month != month):
|
||||
wd -= ONEWEEK
|
||||
|
||||
return wd
|
||||
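# Editor's note: illustrative call (not part of the original file). The US
# DST-start rule "second Sunday in March at 02:00" corresponds to:
#
#     picknthweekday(2021, 3, 0, 2, 0, 2)
#     # -> datetime.datetime(2021, 3, 14, 2, 0)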
|
||||
|
||||
def valuestodict(key):
|
||||
"""Convert a registry key's values to a dictionary."""
|
||||
dout = {}
|
||||
size = winreg.QueryInfoKey(key)[1]
|
||||
tz_res = None
|
||||
|
||||
for i in range(size):
|
||||
key_name, value, dtype = winreg.EnumValue(key, i)
|
||||
if dtype == winreg.REG_DWORD or dtype == winreg.REG_DWORD_LITTLE_ENDIAN:
|
||||
# If it's a DWORD (32-bit integer), it's stored as unsigned - convert
|
||||
# that to a proper signed integer
|
||||
if value & (1 << 31):
|
||||
value = value - (1 << 32)
|
||||
elif dtype == winreg.REG_SZ:
|
||||
# If it's a reference to the tzres DLL, load the actual string
|
||||
if value.startswith('@tzres'):
|
||||
tz_res = tz_res or tzres()
|
||||
value = tz_res.name_from_string(value)
|
||||
|
||||
value = value.rstrip('\x00') # Remove trailing nulls
|
||||
|
||||
dout[key_name] = value
|
||||
|
||||
return dout
|
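# Editor's note (sketch, not part of the original file): the DWORD branch in
# valuestodict() reinterprets the registry's unsigned 32-bit values as signed
# integers, which is what the bias fields require, e.g.:
#
#     value = 0xFFFFFFC4          # unsigned DWORD as read from the registry
#     value = value - (1 << 32)   # -> -60 minutes (a UTC+1 bias)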
2
.local/lib/python3.8/site-packages/dateutil/tzwin.py
Normal file
2
.local/lib/python3.8/site-packages/dateutil/tzwin.py
Normal file
@ -0,0 +1,2 @@
|
||||
# tzwin has moved to dateutil.tz.win
|
||||
from .tz.win import *
|
71
.local/lib/python3.8/site-packages/dateutil/utils.py
Normal file
71
.local/lib/python3.8/site-packages/dateutil/utils.py
Normal file
@ -0,0 +1,71 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
This module offers general convenience and utility functions for dealing with
|
||||
datetimes.
|
||||
|
||||
.. versionadded:: 2.7.0
|
||||
"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from datetime import datetime, time
|
||||
|
||||
|
||||
def today(tzinfo=None):
|
||||
"""
|
||||
Returns a :py:class:`datetime` representing the current day at midnight
|
||||
|
||||
:param tzinfo:
|
||||
The time zone to attach (also used to determine the current day).
|
||||
|
||||
:return:
|
||||
A :py:class:`datetime.datetime` object representing the current day
|
||||
at midnight.
|
||||
"""
|
||||
|
||||
dt = datetime.now(tzinfo)
|
||||
return datetime.combine(dt.date(), time(0, tzinfo=tzinfo))
|
||||
|
||||
|
||||
def default_tzinfo(dt, tzinfo):
|
||||
"""
|
||||
Sets the ``tzinfo`` parameter on naive datetimes only
|
||||
|
||||
This is useful for example when you are provided a datetime that may have
|
||||
either an implicit or explicit time zone, such as when parsing a time zone
|
||||
string.
|
||||
|
||||
.. doctest::
|
||||
|
||||
>>> from dateutil.tz import tzoffset
|
||||
>>> from dateutil.parser import parse
|
||||
>>> from dateutil.utils import default_tzinfo
|
||||
>>> dflt_tz = tzoffset("EST", -18000)
|
||||
>>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz))
|
||||
2014-01-01 12:30:00+00:00
|
||||
>>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz))
|
||||
2014-01-01 12:30:00-05:00
|
||||
|
||||
:param dt:
|
||||
The datetime on which to replace the time zone
|
||||
|
||||
:param tzinfo:
|
||||
The :py:class:`datetime.tzinfo` subclass instance to assign to
|
||||
``dt`` if (and only if) it is naive.
|
||||
|
||||
:return:
|
||||
Returns an aware :py:class:`datetime.datetime`.
|
||||
"""
|
||||
if dt.tzinfo is not None:
|
||||
return dt
|
||||
else:
|
||||
return dt.replace(tzinfo=tzinfo)
|
||||
|
||||
|
||||
def within_delta(dt1, dt2, delta):
|
||||
"""
|
||||
Useful for comparing two datetimes that may have a negligible difference
|
||||
to be considered equal.
|
||||
"""
|
||||
delta = abs(delta)
|
||||
difference = dt1 - dt2
|
||||
return -delta <= difference <= delta
|
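# Editor's note: a minimal usage sketch (not part of the original file).
#
#     from datetime import datetime, timedelta
#     a = datetime(2021, 1, 1, 12, 0, 0)
#     b = datetime(2021, 1, 1, 12, 0, 0, 500)     # 500 microseconds later
#     within_delta(a, b, timedelta(seconds=1))    # True
#     within_delta(a, b, timedelta(0))            # False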
167
.local/lib/python3.8/site-packages/dateutil/zoneinfo/__init__.py
Normal file
167
.local/lib/python3.8/site-packages/dateutil/zoneinfo/__init__.py
Normal file
@ -0,0 +1,167 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import warnings
|
||||
import json
|
||||
|
||||
from tarfile import TarFile
|
||||
from pkgutil import get_data
|
||||
from io import BytesIO
|
||||
|
||||
from dateutil.tz import tzfile as _tzfile
|
||||
|
||||
__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"]
|
||||
|
||||
ZONEFILENAME = "dateutil-zoneinfo.tar.gz"
|
||||
METADATA_FN = 'METADATA'
|
||||
|
||||
|
||||
class tzfile(_tzfile):
|
||||
def __reduce__(self):
|
||||
return (gettz, (self._filename,))
|
||||
|
||||
|
||||
def getzoneinfofile_stream():
|
||||
try:
|
||||
return BytesIO(get_data(__name__, ZONEFILENAME))
|
||||
except IOError as e: # TODO switch to FileNotFoundError?
|
||||
warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror))
|
||||
return None
|
||||
|
||||
|
||||
class ZoneInfoFile(object):
|
||||
def __init__(self, zonefile_stream=None):
|
||||
if zonefile_stream is not None:
|
||||
with TarFile.open(fileobj=zonefile_stream) as tf:
|
||||
self.zones = {zf.name: tzfile(tf.extractfile(zf), filename=zf.name)
|
||||
for zf in tf.getmembers()
|
||||
if zf.isfile() and zf.name != METADATA_FN}
|
||||
# deal with links: They'll point to their parent object. Less
|
||||
# waste of memory
|
||||
links = {zl.name: self.zones[zl.linkname]
|
||||
for zl in tf.getmembers() if
|
||||
zl.islnk() or zl.issym()}
|
||||
self.zones.update(links)
|
||||
try:
|
||||
metadata_json = tf.extractfile(tf.getmember(METADATA_FN))
|
||||
metadata_str = metadata_json.read().decode('UTF-8')
|
||||
self.metadata = json.loads(metadata_str)
|
||||
except KeyError:
|
||||
# no metadata in tar file
|
||||
self.metadata = None
|
||||
else:
|
||||
self.zones = {}
|
||||
self.metadata = None
|
||||
|
||||
def get(self, name, default=None):
|
||||
"""
|
||||
Wrapper for :func:`ZoneInfoFile.zones.get`. This is a convenience method
|
||||
for retrieving zones from the zone dictionary.
|
||||
|
||||
:param name:
|
||||
The name of the zone to retrieve. (Generally IANA zone names)
|
||||
|
||||
:param default:
|
||||
The value to return in the event of a missing key.
|
||||
|
||||
.. versionadded:: 2.6.0
|
||||
|
||||
"""
|
||||
return self.zones.get(name, default)
|
||||
|
||||
|
||||
# The current API has gettz as a module function, although in fact it taps into
|
||||
# a stateful class. So as a workaround for now, without changing the API, we
|
||||
# will create a new "global" class instance the first time a user requests a
|
||||
# timezone. Ugly, but adheres to the API.
|
||||
#
|
||||
# TODO: Remove after deprecation period.
|
||||
_CLASS_ZONE_INSTANCE = []
|
||||
|
||||
|
||||
def get_zonefile_instance(new_instance=False):
|
||||
"""
|
||||
This is a convenience function which provides a :class:`ZoneInfoFile`
|
||||
instance using the data provided by the ``dateutil`` package. By default, it
|
||||
caches a single instance of the ZoneInfoFile object and returns that.
|
||||
|
||||
:param new_instance:
|
||||
If ``True``, a new instance of :class:`ZoneInfoFile` is instantiated and
|
||||
used as the cached instance for the next call. Otherwise, new instances
|
||||
are created only as necessary.
|
||||
|
||||
:return:
|
||||
Returns a :class:`ZoneInfoFile` object.
|
||||
|
||||
.. versionadded:: 2.6
|
||||
"""
|
||||
if new_instance:
|
||||
zif = None
|
||||
else:
|
||||
zif = getattr(get_zonefile_instance, '_cached_instance', None)
|
||||
|
||||
if zif is None:
|
||||
zif = ZoneInfoFile(getzoneinfofile_stream())
|
||||
|
||||
get_zonefile_instance._cached_instance = zif
|
||||
|
||||
return zif
|
||||
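# Editor's note: suggested usage of the accessor above (sketch, not part of
# the original file); the zone name is an assumption chosen for the example.
#
#     from dateutil.zoneinfo import get_zonefile_instance
#
#     zoneinfo = get_zonefile_instance()
#     nyc = zoneinfo.get('America/New_York')   # a dateutil tzfile, or None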
|
||||
|
||||
def gettz(name):
|
||||
"""
|
||||
This retrieves a time zone from the local zoneinfo tarball that is packaged
|
||||
with dateutil.
|
||||
|
||||
:param name:
|
||||
An IANA-style time zone name, as found in the zoneinfo file.
|
||||
|
||||
:return:
|
||||
Returns a :class:`dateutil.tz.tzfile` time zone object.
|
||||
|
||||
.. warning::
|
||||
It is generally inadvisable to use this function, and it is only
|
||||
provided for API compatibility with earlier versions. This is *not*
|
||||
equivalent to ``dateutil.tz.gettz()``, which selects an appropriate
|
||||
time zone based on the inputs, favoring system zoneinfo. This is ONLY
|
||||
for accessing the dateutil-specific zoneinfo (which may be out of
|
||||
date compared to the system zoneinfo).
|
||||
|
||||
.. deprecated:: 2.6
|
||||
If you need to use a specific zoneinfofile over the system zoneinfo,
|
||||
instantiate a :class:`dateutil.zoneinfo.ZoneInfoFile` object and call
|
||||
:func:`dateutil.zoneinfo.ZoneInfoFile.get(name)` instead.
|
||||
|
||||
Use :func:`get_zonefile_instance` to retrieve an instance of the
|
||||
dateutil-provided zoneinfo.
|
||||
"""
|
||||
warnings.warn("zoneinfo.gettz() will be removed in future versions, "
|
||||
"to use the dateutil-provided zoneinfo files, instantiate a "
|
||||
"ZoneInfoFile object and use ZoneInfoFile.zones.get() "
|
||||
"instead. See the documentation for details.",
|
||||
DeprecationWarning)
|
||||
|
||||
if len(_CLASS_ZONE_INSTANCE) == 0:
|
||||
_CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream()))
|
||||
return _CLASS_ZONE_INSTANCE[0].zones.get(name)
|
||||
|
||||
|
||||
def gettz_db_metadata():
|
||||
""" Get the zonefile metadata
|
||||
|
||||
See `zonefile_metadata`_
|
||||
|
||||
:returns:
|
||||
A dictionary with the database metadata
|
||||
|
||||
.. deprecated:: 2.6
|
||||
See deprecation warning in :func:`zoneinfo.gettz`. To get metadata,
|
||||
query the attribute ``zoneinfo.ZoneInfoFile.metadata``.
|
||||
"""
|
||||
warnings.warn("zoneinfo.gettz_db_metadata() will be removed in future "
|
||||
"versions, to use the dateutil-provided zoneinfo files, "
|
||||
"ZoneInfoFile object and query the 'metadata' attribute "
|
||||
"instead. See the documentation for details.",
|
||||
DeprecationWarning)
|
||||
|
||||
if len(_CLASS_ZONE_INSTANCE) == 0:
|
||||
_CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream()))
|
||||
return _CLASS_ZONE_INSTANCE[0].metadata
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,75 @@
|
||||
import logging
|
||||
import os
|
||||
import tempfile
|
||||
import shutil
|
||||
import json
|
||||
from subprocess import check_call, check_output
|
||||
from tarfile import TarFile
|
||||
|
||||
from dateutil.zoneinfo import METADATA_FN, ZONEFILENAME
|
||||
|
||||
|
||||
def rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None):
|
||||
"""Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar*
|
||||
|
||||
filename is the timezone tarball from ``ftp.iana.org/tz``.
|
||||
|
||||
"""
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
zonedir = os.path.join(tmpdir, "zoneinfo")
|
||||
moduledir = os.path.dirname(__file__)
|
||||
try:
|
||||
with TarFile.open(filename) as tf:
|
||||
for name in zonegroups:
|
||||
tf.extract(name, tmpdir)
|
||||
filepaths = [os.path.join(tmpdir, n) for n in zonegroups]
|
||||
|
||||
_run_zic(zonedir, filepaths)
|
||||
|
||||
# write metadata file
|
||||
with open(os.path.join(zonedir, METADATA_FN), 'w') as f:
|
||||
json.dump(metadata, f, indent=4, sort_keys=True)
|
||||
target = os.path.join(moduledir, ZONEFILENAME)
|
||||
with TarFile.open(target, "w:%s" % format) as tf:
|
||||
for entry in os.listdir(zonedir):
|
||||
entrypath = os.path.join(zonedir, entry)
|
||||
tf.add(entrypath, entry)
|
||||
finally:
|
||||
shutil.rmtree(tmpdir)
|
||||
|
||||
|
||||
def _run_zic(zonedir, filepaths):
|
||||
"""Calls the ``zic`` compiler in a compatible way to get a "fat" binary.
|
||||
|
||||
Recent versions of ``zic`` default to ``-b slim``, while older versions
|
||||
don't even have the ``-b`` option (but default to "fat" binaries). The
|
||||
current version of dateutil does not support Version 2+ TZif files, which
|
||||
causes problems when used in conjunction with "slim" binaries, so this
|
||||
function is used to ensure that we always get a "fat" binary.
|
||||
"""
|
||||
|
||||
try:
|
||||
help_text = check_output(["zic", "--help"])
|
||||
except OSError as e:
|
||||
_print_on_nosuchfile(e)
|
||||
raise
|
||||
|
||||
if b"-b " in help_text:
|
||||
bloat_args = ["-b", "fat"]
|
||||
else:
|
||||
bloat_args = []
|
||||
|
||||
check_call(["zic"] + bloat_args + ["-d", zonedir] + filepaths)
|
||||
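# Editor's note (sketch, not part of the original file): on a system whose
# ``zic`` advertises ``-b`` in its help text, the call above is roughly
# equivalent to running, for example:
#
#     zic -b fat -d <tmpdir>/zoneinfo africa antarctica asia ...
#
# while older zic releases are invoked without the ``-b fat`` arguments.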
|
||||
|
||||
def _print_on_nosuchfile(e):
|
||||
"""Print helpful troubleshooting message
|
||||
|
||||
e is an exception raised by subprocess.check_call()
|
||||
|
||||
"""
|
||||
if e.errno == 2:
|
||||
logging.error(
|
||||
"Could not find zic. Perhaps you need to install "
|
||||
"libc-bin or some other package that provides it, "
|
||||
"or it's not in your PATH?")
|
@ -0,0 +1 @@
|
||||
pip
|
@ -0,0 +1,21 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: kaggle
|
||||
Version: 1.5.12
|
||||
Summary: Kaggle API
|
||||
Home-page: https://github.com/Kaggle/kaggle-api
|
||||
Author: Kaggle
|
||||
Author-email: support@kaggle.com
|
||||
License: Apache 2.0
|
||||
Keywords: Kaggle,API
|
||||
Platform: UNKNOWN
|
||||
Requires-Dist: certifi
|
||||
Requires-Dist: python-dateutil
|
||||
Requires-Dist: python-slugify
|
||||
Requires-Dist: requests
|
||||
Requires-Dist: six (>=1.10)
|
||||
Requires-Dist: tqdm
|
||||
Requires-Dist: urllib3
|
||||
|
||||
Official API for https://www.kaggle.com, accessible using a command line tool implemented in Python. Beta release - Kaggle reserves the right to modify the API functionality currently offered.
|
||||
|
||||
|
@ -0,0 +1,51 @@
|
||||
../../../bin/kaggle,sha256=A9di5OMstgwIa4awH66t2E8sTDDV9cPOHl9yX3rXSFs,209
|
||||
kaggle-1.5.12.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
kaggle-1.5.12.dist-info/METADATA,sha256=9XgZyIpRZCX6Pec9f5cRWCPoiZd8Ae1ziKDiXID6aJY,601
|
||||
kaggle-1.5.12.dist-info/RECORD,,
|
||||
kaggle-1.5.12.dist-info/WHEEL,sha256=g4nMs7d-Xl9-xC9XovUrsDHGXt-FT0E17Yqo92DEfvY,92
|
||||
kaggle-1.5.12.dist-info/entry_points.txt,sha256=rBWEp_W_lHPkwqY06UlBKVv2PMxJTOM8OkgUK2iGJ6I,44
|
||||
kaggle-1.5.12.dist-info/top_level.txt,sha256=Y-2G5HyGxBsP1wVIKTJK5OB4qbSlC-MaA8-PjkezJzo,7
|
||||
kaggle/__init__.py,sha256=VOCRE9nkEDIMvAAVOe7xbODKjl-O0ezOFZqV0zkvVrU,799
|
||||
kaggle/__pycache__/__init__.cpython-38.pyc,,
|
||||
kaggle/__pycache__/api_client.cpython-38.pyc,,
|
||||
kaggle/__pycache__/cli.cpython-38.pyc,,
|
||||
kaggle/__pycache__/configuration.cpython-38.pyc,,
|
||||
kaggle/__pycache__/rest.cpython-38.pyc,,
|
||||
kaggle/api/__init__.py,sha256=EPLRR7HTv9s0ZRMX3mNyGyV94U3ICtQnomVXG9gnO4k,742
|
||||
kaggle/api/__pycache__/__init__.cpython-38.pyc,,
|
||||
kaggle/api/__pycache__/kaggle_api.cpython-38.pyc,,
|
||||
kaggle/api/__pycache__/kaggle_api_extended.cpython-38.pyc,,
|
||||
kaggle/api/kaggle_api.py,sha256=78MxbKuY93-2hzD3r9bTvglmZypqVofKMxwuE-hZhpY,125827
|
||||
kaggle/api/kaggle_api_extended.py,sha256=h50qo4V1gt9nK4JEO9wMhni3qKkfu_W9JkUDdNAN6BM,108002
|
||||
kaggle/api_client.py,sha256=4PXEzo6jv11lxA_zv6mUm0SWy2jZEDvIswWeSpSidok,25458
|
||||
kaggle/cli.py,sha256=G8_hCpjNsrUvFKyqrbP0h52_gP9W_BFqJ35j1wVfV58,53449
|
||||
kaggle/configuration.py,sha256=NPuxMObNIRQY44xUnhC8gJaw60uHXirdXeb8VEmVlhQ,8858
|
||||
kaggle/models/__init__.py,sha256=QhrMWG0tYILF67c9rUhTRCBy-15Ar-ouDp28ZRv6ysc,1481
|
||||
kaggle/models/__pycache__/__init__.cpython-38.pyc,,
|
||||
kaggle/models/__pycache__/collaborator.cpython-38.pyc,,
|
||||
kaggle/models/__pycache__/dataset_column.cpython-38.pyc,,
|
||||
kaggle/models/__pycache__/dataset_new_request.cpython-38.pyc,,
|
||||
kaggle/models/__pycache__/dataset_new_version_request.cpython-38.pyc,,
|
||||
kaggle/models/__pycache__/dataset_update_settings_request.cpython-38.pyc,,
|
||||
kaggle/models/__pycache__/dataset_upload_file.cpython-38.pyc,,
|
||||
kaggle/models/__pycache__/error.cpython-38.pyc,,
|
||||
kaggle/models/__pycache__/kaggle_models_extended.cpython-38.pyc,,
|
||||
kaggle/models/__pycache__/kernel_push_request.cpython-38.pyc,,
|
||||
kaggle/models/__pycache__/license.cpython-38.pyc,,
|
||||
kaggle/models/__pycache__/result.cpython-38.pyc,,
|
||||
kaggle/models/collaborator.py,sha256=RyKGMWV-Yj1GiWGKXeeD1fnzi3X6Cl1ALd4TgnXFmdA,4851
|
||||
kaggle/models/dataset_column.py,sha256=iBy1P9Q2jgFpMhCaMX7sBW5a40ogieOSRu9pY2v8t2A,7424
|
||||
kaggle/models/dataset_new_request.py,sha256=P6Uy54wpDrcsa_OVqfZzVVUR-EcQZmi4nAMp7yVFGO4,12317
|
||||
kaggle/models/dataset_new_version_request.py,sha256=1ZZMpnPFWmXjkWdLhOfaRqd7pFwCLMcRacgr3BzgDu4,10027
|
||||
kaggle/models/dataset_update_settings_request.py,sha256=boOEHQkuAx2tW8epjSvfr4cUGLIMXSqRogZuAsJSqZ4,10114
|
||||
kaggle/models/dataset_upload_file.py,sha256=-9Q9Ys66YsK9G_KauFy0roaIYiv3T08V43b8HCizZ70,5544
|
||||
kaggle/models/error.py,sha256=-IUwKPrBxktlWgsGP3WmFL-E7BA-KSCLyzNFwEWUFJ8,4342
|
||||
kaggle/models/kaggle_models_extended.py,sha256=mZ4V_kyIOPONZx8PwfUZvYMa21mkeSaagfnKmar3P8k,6180
|
||||
kaggle/models/kernel_push_request.py,sha256=C_RqbuocgqJ5dVw5mmS4G3FGTp2C57qSagn5hUNj7wg,18059
|
||||
kaggle/models/license.py,sha256=ULPu9kN4eqoX9j-wjReooXXU4Q8aIjd8rZPm2OMKcS4,4077
|
||||
kaggle/models/result.py,sha256=E-_lV8pwAoN4fXgUUAHwUCsWQWo47_fwf7ycQJ0unLE,2895
|
||||
kaggle/rest.py,sha256=2-yrpOGjNNEMiPWuwkubNj7KU4RZJGP6N7vJjyVrVYY,13927
|
||||
kaggle/tests/__init__.py,sha256=tlLdBBlKzW2I7EkqKj5SOUBkjp3ADimepFL8my4txM8,596
|
||||
kaggle/tests/__pycache__/__init__.cpython-38.pyc,,
|
||||
kaggle/tests/__pycache__/test_authenticate.cpython-38.pyc,,
|
||||
kaggle/tests/test_authenticate.py,sha256=yJlMOhreo4wA5EiCO4pFydAvczAN4gz0V93MjZnmLug,1780
|
@ -0,0 +1,5 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.34.2)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
|
@ -0,0 +1,3 @@
|
||||
[console_scripts]
|
||||
kaggle = kaggle.cli:main
|
||||
|
@ -0,0 +1 @@
|
||||
kaggle
|
23
.local/lib/python3.8/site-packages/kaggle/__init__.py
Normal file
23
.local/lib/python3.8/site-packages/kaggle/__init__.py
Normal file
@ -0,0 +1,23 @@
|
||||
#!/usr/bin/python
|
||||
#
|
||||
# Copyright 2021 Kaggle Inc
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# coding=utf-8
|
||||
from __future__ import absolute_import
|
||||
from kaggle.api.kaggle_api_extended import KaggleApi
|
||||
from kaggle.api_client import ApiClient
|
||||
|
||||
api = KaggleApi(ApiClient())
|
||||
api.authenticate()
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
22
.local/lib/python3.8/site-packages/kaggle/api/__init__.py
Normal file
22
.local/lib/python3.8/site-packages/kaggle/api/__init__.py
Normal file
@ -0,0 +1,22 @@
|
||||
#!/usr/bin/python
|
||||
#
|
||||
# Copyright 2021 Kaggle Inc
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
# flake8: noqa
|
||||
|
||||
# import apis into api package
|
||||
from kaggle.api.kaggle_api_extended import KaggleApi
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
2885
.local/lib/python3.8/site-packages/kaggle/api/kaggle_api.py
Normal file
2885
.local/lib/python3.8/site-packages/kaggle/api/kaggle_api.py
Normal file
File diff suppressed because it is too large
2637
.local/lib/python3.8/site-packages/kaggle/api/kaggle_api_extended.py
Normal file
2637
.local/lib/python3.8/site-packages/kaggle/api/kaggle_api_extended.py
Normal file
File diff suppressed because it is too large
633
.local/lib/python3.8/site-packages/kaggle/api_client.py
Normal file
633
.local/lib/python3.8/site-packages/kaggle/api_client.py
Normal file
@ -0,0 +1,633 @@
|
||||
#!/usr/bin/python
|
||||
#
|
||||
# Copyright 2021 Kaggle Inc
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# coding: utf-8
|
||||
"""
|
||||
Kaggle API
|
||||
|
||||
API for kaggle.com # noqa: E501
|
||||
|
||||
OpenAPI spec version: 1
|
||||
|
||||
Generated by: https://github.com/swagger-api/swagger-codegen.git
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import mimetypes
|
||||
from multiprocessing.pool import ThreadPool
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
|
||||
# python 2 and python 3 compatibility library
|
||||
import six
|
||||
from six.moves.urllib.parse import quote
|
||||
|
||||
from kaggle.configuration import Configuration
|
||||
import kaggle.models
|
||||
from kaggle import rest
|
||||
|
||||
|
||||
class ApiClient(object):
|
||||
"""Generic API client for Swagger client library builds.
|
||||
|
||||
Swagger generic API client. This client handles the client-
|
||||
server communication, and is invariant across implementations. Specifics of
|
||||
the methods and models for each application are generated from the Swagger
|
||||
templates.
|
||||
|
||||
NOTE: This class is auto generated by the swagger code generator program.
|
||||
Ref: https://github.com/swagger-api/swagger-codegen
|
||||
Do not edit the class manually.
|
||||
|
||||
:param configuration: .Configuration object for this client
|
||||
:param header_name: a header to pass when making calls to the API.
|
||||
:param header_value: a header value to pass when making calls to
|
||||
the API.
|
||||
:param cookie: a cookie to include in the header when making calls
|
||||
to the API
|
||||
"""
|
||||
|
||||
PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
|
||||
NATIVE_TYPES_MAPPING = {
|
||||
'int': int,
|
||||
'long': int if six.PY3 else long, # noqa: F821
|
||||
'float': float,
|
||||
'str': str,
|
||||
'bool': bool,
|
||||
'date': datetime.date,
|
||||
'datetime': datetime.datetime,
|
||||
'object': object,
|
||||
}
|
||||
|
||||
def __init__(self, configuration=None, header_name=None, header_value=None,
|
||||
cookie=None):
|
||||
if configuration is None:
|
||||
configuration = Configuration()
|
||||
self.configuration = configuration
|
||||
|
||||
self.pool = ThreadPool()
|
||||
self.rest_client = rest.RESTClientObject(configuration)
|
||||
self.default_headers = {}
|
||||
if header_name is not None:
|
||||
self.default_headers[header_name] = header_value
|
||||
self.cookie = cookie
|
||||
# Set default User-Agent.
|
||||
self.user_agent = 'Swagger-Codegen/1/python'
|
||||
|
||||
@property
|
||||
def user_agent(self):
|
||||
"""User agent for this API client"""
|
||||
return self.default_headers['User-Agent']
|
||||
|
||||
@user_agent.setter
|
||||
def user_agent(self, value):
|
||||
self.default_headers['User-Agent'] = value
|
||||
|
||||
def set_default_header(self, header_name, header_value):
|
||||
self.default_headers[header_name] = header_value
|
||||
|
||||
def __call_api(
|
||||
self, resource_path, method, path_params=None,
|
||||
query_params=None, header_params=None, body=None, post_params=None,
|
||||
files=None, response_type=None, auth_settings=None,
|
||||
_return_http_data_only=None, collection_formats=None,
|
||||
_preload_content=True, _request_timeout=None):
|
||||
|
||||
config = self.configuration
|
||||
|
||||
# header parameters
|
||||
header_params = header_params or {}
|
||||
header_params.update(self.default_headers)
|
||||
if self.cookie:
|
||||
header_params['Cookie'] = self.cookie
|
||||
if header_params:
|
||||
header_params = self.sanitize_for_serialization(header_params)
|
||||
header_params = dict(self.parameters_to_tuples(header_params,
|
||||
collection_formats))
|
||||
|
||||
# path parameters
|
||||
if path_params:
|
||||
path_params = self.sanitize_for_serialization(path_params)
|
||||
path_params = self.parameters_to_tuples(path_params,
|
||||
collection_formats)
|
||||
for k, v in path_params:
|
||||
# specified safe chars, encode everything
|
||||
resource_path = resource_path.replace(
|
||||
'{%s}' % k,
|
||||
quote(str(v), safe=config.safe_chars_for_path_param)
|
||||
)
|
||||
|
||||
# query parameters
|
||||
if query_params:
|
||||
query_params = self.sanitize_for_serialization(query_params)
|
||||
query_params = self.parameters_to_tuples(query_params,
|
||||
collection_formats)
|
||||
|
||||
# post parameters
|
||||
if post_params or files:
|
||||
post_params = self.prepare_post_parameters(post_params, files)
|
||||
post_params = self.sanitize_for_serialization(post_params)
|
||||
post_params = self.parameters_to_tuples(post_params,
|
||||
collection_formats)
|
||||
|
||||
# auth setting
|
||||
self.update_params_for_auth(header_params, query_params, auth_settings)
|
||||
|
||||
# body
|
||||
if body:
|
||||
body = self.sanitize_for_serialization(body)
|
||||
|
||||
# request url
|
||||
url = self.configuration.host + resource_path
|
||||
|
||||
# perform request and return response
|
||||
response_data = self.request(
|
||||
method, url, query_params=query_params, headers=header_params,
|
||||
post_params=post_params, body=body,
|
||||
_preload_content=_preload_content,
|
||||
_request_timeout=_request_timeout)
|
||||
|
||||
self.last_response = response_data
|
||||
|
||||
return_data = response_data
|
||||
if _preload_content:
|
||||
# deserialize response data
|
||||
if response_type:
|
||||
return_data = self.deserialize(response_data, response_type)
|
||||
else:
|
||||
return_data = None
|
||||
|
||||
if _return_http_data_only:
|
||||
return (return_data)
|
||||
else:
|
||||
return (return_data, response_data.status,
|
||||
response_data.getheaders())
|
||||
|
||||
def sanitize_for_serialization(self, obj):
|
||||
"""Builds a JSON POST object.
|
||||
|
||||
If obj is None, return None.
|
||||
If obj is str, int, long, float, bool, return directly.
|
||||
If obj is datetime.datetime, datetime.date
|
||||
convert to string in iso8601 format.
|
||||
If obj is list, sanitize each element in the list.
|
||||
If obj is dict, return the dict.
|
||||
If obj is swagger model, return the properties dict.
|
||||
|
||||
:param obj: The data to serialize.
|
||||
:return: The serialized form of data.
|
||||
"""
|
||||
if obj is None:
|
||||
return None
|
||||
elif isinstance(obj, self.PRIMITIVE_TYPES):
|
||||
return obj
|
||||
elif isinstance(obj, list):
|
||||
return [self.sanitize_for_serialization(sub_obj)
|
||||
for sub_obj in obj]
|
||||
elif isinstance(obj, tuple):
|
||||
return tuple(self.sanitize_for_serialization(sub_obj)
|
||||
for sub_obj in obj)
|
||||
elif isinstance(obj, (datetime.datetime, datetime.date)):
|
||||
return obj.isoformat()
|
||||
|
||||
if isinstance(obj, dict):
|
||||
obj_dict = obj
|
||||
else:
|
||||
# Convert model obj to dict except
|
||||
# attributes `swagger_types`, `attribute_map`
|
||||
# and attributes which value is not None.
|
||||
# Convert attribute name to json key in
|
||||
# model definition for request.
|
||||
obj_dict = {obj.attribute_map[attr]: getattr(obj, attr)
|
||||
for attr, _ in six.iteritems(obj.swagger_types)
|
||||
if getattr(obj, attr) is not None}
|
||||
|
||||
return {key: self.sanitize_for_serialization(val)
|
||||
for key, val in six.iteritems(obj_dict)}
|
||||
|
||||
def deserialize(self, response, response_type):
|
||||
"""Deserializes response into an object.
|
||||
|
||||
:param response: RESTResponse object to be deserialized.
|
||||
:param response_type: class literal for
|
||||
deserialized object, or string of class name.
|
||||
|
||||
:return: deserialized object.
|
||||
"""
|
||||
# handle file downloading
|
||||
# save response body into a tmp file and return the instance
|
||||
if response_type == "file":
|
||||
return self.__deserialize_file(response)
|
||||
|
||||
# fetch data from response object
|
||||
try:
|
||||
data = json.loads(response.data)
|
||||
except ValueError:
|
||||
data = response.data
|
||||
|
||||
return self.__deserialize(data, response_type)
|
||||
|
||||
def __deserialize(self, data, klass):
|
||||
"""Deserializes dict, list, str into an object.
|
||||
|
||||
:param data: dict, list or str.
|
||||
:param klass: class literal, or string of class name.
|
||||
|
||||
:return: object.
|
||||
"""
|
||||
if data is None:
|
||||
return None
|
||||
|
||||
if type(klass) == str:
|
||||
if klass.startswith('list['):
|
||||
sub_kls = re.match(r'list\[(.*)\]', klass).group(1)
|
||||
return [self.__deserialize(sub_data, sub_kls)
|
||||
for sub_data in data]
|
||||
|
||||
if klass.startswith('dict('):
|
||||
sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2)
|
||||
return {k: self.__deserialize(v, sub_kls)
|
||||
for k, v in six.iteritems(data)}
|
||||
|
||||
# convert str to class
|
||||
if klass in self.NATIVE_TYPES_MAPPING:
|
||||
klass = self.NATIVE_TYPES_MAPPING[klass]
|
||||
else:
|
||||
klass = getattr(kaggle.models, klass)
|
||||
|
||||
if klass in self.PRIMITIVE_TYPES:
|
||||
return self.__deserialize_primitive(data, klass)
|
||||
elif klass == object:
|
||||
return self.__deserialize_object(data)
|
||||
elif klass == datetime.date:
|
||||
return self.__deserialize_date(data)
|
||||
elif klass == datetime.datetime:
|
||||
return self.__deserialize_datatime(data)
|
||||
else:
|
||||
return self.__deserialize_model(data, klass)
|
||||
|
||||
def call_api(self, resource_path, method,
|
||||
path_params=None, query_params=None, header_params=None,
|
||||
body=None, post_params=None, files=None,
|
||||
response_type=None, auth_settings=None, async_req=None,
|
||||
_return_http_data_only=None, collection_formats=None,
|
||||
_preload_content=True, _request_timeout=None):
|
||||
"""Makes the HTTP request (synchronous) and returns deserialized data.
|
||||
|
||||
To make an async request, set the async_req parameter.
|
||||
|
||||
:param resource_path: Path to method endpoint.
|
||||
:param method: Method to call.
|
||||
:param path_params: Path parameters in the url.
|
||||
:param query_params: Query parameters in the url.
|
||||
:param header_params: Header parameters to be
|
||||
placed in the request header.
|
||||
:param body: Request body.
|
||||
:param post_params dict: Request post form parameters,
|
||||
for `application/x-www-form-urlencoded`, `multipart/form-data`.
|
||||
:param auth_settings list: Auth Settings names for the request.
|
||||
:param response: Response data type.
|
||||
:param files dict: key -> filename, value -> filepath,
|
||||
for `multipart/form-data`.
|
||||
:param async_req bool: execute request asynchronously
|
||||
:param _return_http_data_only: response data without head status code
|
||||
and headers
|
||||
:param collection_formats: dict of collection formats for path, query,
|
||||
header, and post parameters.
|
||||
:param _preload_content: if False, the urllib3.HTTPResponse object will
|
||||
be returned without reading/decoding response
|
||||
data. Default is True.
|
||||
:param _request_timeout: timeout setting for this request. If one
|
||||
number provided, it will be total request
|
||||
timeout. It can also be a pair (tuple) of
|
||||
(connection, read) timeouts.
|
||||
:return:
|
||||
If async_req parameter is True,
|
||||
the request will be called asynchronously.
|
||||
The method will return the request thread.
|
||||
If parameter async_req is False or missing,
|
||||
then the method will return the response directly.
|
||||
"""
|
||||
if not async_req:
|
||||
return self.__call_api(resource_path, method,
|
||||
path_params, query_params, header_params,
|
||||
body, post_params, files,
|
||||
response_type, auth_settings,
|
||||
_return_http_data_only, collection_formats,
|
||||
_preload_content, _request_timeout)
|
||||
else:
|
||||
thread = self.pool.apply_async(self.__call_api, (resource_path,
|
||||
method, path_params, query_params,
|
||||
header_params, body,
|
||||
post_params, files,
|
||||
response_type, auth_settings,
|
||||
_return_http_data_only,
|
||||
collection_formats,
|
||||
_preload_content, _request_timeout))
|
||||
return thread
|
||||
|
||||
    def request(self, method, url, query_params=None, headers=None,
                post_params=None, body=None, _preload_content=True,
                _request_timeout=None):
        """Makes the HTTP request using RESTClient."""
        if method == "GET":
            return self.rest_client.GET(url,
                                        query_params=query_params,
                                        _preload_content=_preload_content,
                                        _request_timeout=_request_timeout,
                                        headers=headers)
        elif method == "HEAD":
            return self.rest_client.HEAD(url,
                                         query_params=query_params,
                                         _preload_content=_preload_content,
                                         _request_timeout=_request_timeout,
                                         headers=headers)
        elif method == "OPTIONS":
            return self.rest_client.OPTIONS(url,
                                            query_params=query_params,
                                            headers=headers,
                                            post_params=post_params,
                                            _preload_content=_preload_content,
                                            _request_timeout=_request_timeout,
                                            body=body)
        elif method == "POST":
            return self.rest_client.POST(url,
                                         query_params=query_params,
                                         headers=headers,
                                         post_params=post_params,
                                         _preload_content=_preload_content,
                                         _request_timeout=_request_timeout,
                                         body=body)
        elif method == "PUT":
            return self.rest_client.PUT(url,
                                        query_params=query_params,
                                        headers=headers,
                                        post_params=post_params,
                                        _preload_content=_preload_content,
                                        _request_timeout=_request_timeout,
                                        body=body)
        elif method == "PATCH":
            return self.rest_client.PATCH(url,
                                          query_params=query_params,
                                          headers=headers,
                                          post_params=post_params,
                                          _preload_content=_preload_content,
                                          _request_timeout=_request_timeout,
                                          body=body)
        elif method == "DELETE":
            return self.rest_client.DELETE(url,
                                           query_params=query_params,
                                           headers=headers,
                                           _preload_content=_preload_content,
                                           _request_timeout=_request_timeout,
                                           body=body)
        else:
            raise ValueError(
                "http method must be `GET`, `HEAD`, `OPTIONS`,"
                " `POST`, `PATCH`, `PUT` or `DELETE`."
            )

    def parameters_to_tuples(self, params, collection_formats):
        """Get parameters as list of tuples, formatting collections.

        :param params: Parameters as dict or list of two-tuples
        :param dict collection_formats: Parameter collection formats
        :return: Parameters as list of tuples, collections formatted
        """
        new_params = []
        if collection_formats is None:
            collection_formats = {}
        for k, v in six.iteritems(params) if isinstance(params, dict) else params:  # noqa: E501
            if k in collection_formats:
                collection_format = collection_formats[k]
                if collection_format == 'multi':
                    new_params.extend((k, value) for value in v)
                else:
                    if collection_format == 'ssv':
                        delimiter = ' '
                    elif collection_format == 'tsv':
                        delimiter = '\t'
                    elif collection_format == 'pipes':
                        delimiter = '|'
                    else:  # csv is the default
                        delimiter = ','
                    new_params.append(
                        (k, delimiter.join(str(value) for value in v)))
            else:
                new_params.append((k, v))
        return new_params

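A short illustration of the collection formats handled above; the parameter names are invented for the example:

from kaggle.api_client import ApiClient

client = ApiClient()
params = {'tags': ['nlp', 'finance'], 'page': 2}

# 'csv' (the default) joins list values into one comma-separated pair.
print(client.parameters_to_tuples(params, {'tags': 'csv'}))
# -> [('tags', 'nlp,finance'), ('page', 2)]

# 'multi' repeats the key once per value instead.
print(client.parameters_to_tuples(params, {'tags': 'multi'}))
# -> [('tags', 'nlp'), ('tags', 'finance'), ('page', 2)]
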
    def prepare_post_parameters(self, post_params=None, files=None):
        """Builds form parameters.

        :param post_params: Normal form parameters.
        :param files: File parameters.
        :return: Form parameters with files.
        """
        params = []

        if post_params:
            params = post_params

        if files:
            for k, v in six.iteritems(files):
                if not v:
                    continue
                file_names = v if type(v) is list else [v]
                for n in file_names:
                    with open(n, 'rb') as f:
                        filename = os.path.basename(f.name)
                        filedata = f.read()
                        mimetype = (mimetypes.guess_type(filename)[0] or
                                    'application/octet-stream')
                        params.append(
                            tuple([k, tuple([filename, filedata, mimetype])]))

        return params

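A small sketch of how file parameters become multipart tuples; the form field, title and temporary file are illustrative only:

import tempfile

from kaggle.api_client import ApiClient

client = ApiClient()

with tempfile.NamedTemporaryFile(suffix='.csv', delete=False) as tmp:
    tmp.write(b'id,value\n1,42\n')

post_params = client.prepare_post_parameters(
    post_params=[('title', 'demo upload')],
    files={'file': tmp.name})
# The last entry is ('file', (<basename>, <file bytes>, <guessed mimetype>)).
print(post_params[-1][0], post_params[-1][1][0], post_params[-1][1][2])
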
    def select_header_accept(self, accepts):
        """Returns `Accept` based on an array of accepts provided.

        :param accepts: List of headers.
        :return: Accept (e.g. application/json).
        """
        if not accepts:
            return

        accepts = [x.lower() for x in accepts]

        if 'application/json' in accepts:
            return 'application/json'
        else:
            return ', '.join(accepts)

    def select_header_content_type(self, content_types):
        """Returns `Content-Type` based on an array of content_types provided.

        :param content_types: List of content-types.
        :return: Content-Type (e.g. application/json).
        """
        if not content_types:
            return 'application/json'

        content_types = [x.lower() for x in content_types]

        if 'application/json' in content_types or '*/*' in content_types:
            return 'application/json'
        else:
            return content_types[0]

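The two helpers above implement a simple form of content negotiation; a quick sketch of their behaviour:

from kaggle.api_client import ApiClient

client = ApiClient()
print(client.select_header_accept(['application/xml', 'application/json']))
# -> 'application/json' (preferred whenever it is offered)
print(client.select_header_accept(['text/csv', 'application/xml']))
# -> 'text/csv, application/xml'
print(client.select_header_content_type([]))
# -> 'application/json' (the default when nothing is specified)
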
    def update_params_for_auth(self, headers, querys, auth_settings):
        """Updates header and query params based on authentication setting.

        :param headers: Header parameters dict to be updated.
        :param querys: Query parameters tuple list to be updated.
        :param auth_settings: Authentication setting identifiers list.
        """
        if not auth_settings:
            return

        for auth in auth_settings:
            auth_setting = self.configuration.auth_settings().get(auth)
            if auth_setting:
                if not auth_setting['value']:
                    continue
                elif auth_setting['in'] == 'header':
                    headers[auth_setting['key']] = auth_setting['value']
                elif auth_setting['in'] == 'query':
                    querys.append((auth_setting['key'], auth_setting['value']))
                else:
                    raise ValueError(
                        'Authentication token must be in `query` or `header`'
                    )

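A sketch of how the basic-auth setting flows into the request headers, assuming placeholder credentials; `basicAuth` is the only setting defined in this client's `Configuration.auth_settings()`, shown later in this commit:

from kaggle.api_client import ApiClient
from kaggle.configuration import Configuration

config = Configuration()
config.username = 'example-user'       # placeholder credentials
config.password = 'example-api-key'
client = ApiClient(configuration=config)

headers, query = {}, []
client.update_params_for_auth(headers, query, ['basicAuth'])
print(headers)   # {'Authorization': 'Basic ...'}; query stays empty
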
    def __deserialize_file(self, response):
        """Deserializes body to file

        Saves response body into a file in a temporary folder,
        using the filename from the `Content-Disposition` header if provided.

        :param response: RESTResponse.
        :return: file path.
        """
        fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
        os.close(fd)
        os.remove(path)

        content_disposition = response.getheader("Content-Disposition")
        if content_disposition:
            filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?',
                                 content_disposition).group(1)
            path = os.path.join(os.path.dirname(path), filename)

        with open(path, "wb") as f:
            f.write(response.data)

        return path

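The filename extraction above relies on a small regular expression; applied on its own to a typical download header it behaves like this (the archive name is invented):

import re

content_disposition = 'attachment; filename="nyc-taxi-trips.zip"'
match = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', content_disposition)
print(match.group(1))   # nyc-taxi-trips.zip
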
    def __deserialize_primitive(self, data, klass):
        """Deserializes string to primitive type.

        :param data: str.
        :param klass: class literal.

        :return: int, long, float, str, bool.
        """
        try:
            return klass(data)
        except UnicodeEncodeError:
            return six.text_type(data)
        except TypeError:
            return data

    def __deserialize_object(self, value):
        """Return a original value.

        :return: object.
        """
        return value

    def __deserialize_date(self, string):
        """Deserializes string to date.

        :param string: str.
        :return: date.
        """
        try:
            from dateutil.parser import parse
            return parse(string).date()
        except ImportError:
            return string
        except ValueError:
            raise rest.ApiException(
                status=0,
                reason="Failed to parse `{0}` as date object".format(string)
            )

    def __deserialize_datatime(self, string):
        """Deserializes string to datetime.

        The string should be in iso8601 datetime format.

        :param string: str.
        :return: datetime.
        """
        try:
            from dateutil.parser import parse
            return parse(string)
        except ImportError:
            return string
        except ValueError:
            raise rest.ApiException(
                status=0,
                reason=(
                    "Failed to parse `{0}` as datetime object"
                    .format(string)
                )
            )

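Both date helpers above delegate to python-dateutil and fall back to returning the raw string if it is unavailable; a rough sketch of the parsing they rely on, with invented timestamps:

from dateutil.parser import parse

print(parse('2021-03-28').date())       # 2021-03-28
print(parse('2021-03-28T14:30:00Z'))    # 2021-03-28 14:30:00+00:00
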
    def __deserialize_model(self, data, klass):
        """Deserializes list or dict to model.

        :param data: dict, list.
        :param klass: class literal.
        :return: model object.
        """

        if not klass.swagger_types and not hasattr(klass,
                                                   'get_real_child_model'):
            return data

        kwargs = {}
        if klass.swagger_types is not None:
            for attr, attr_type in six.iteritems(klass.swagger_types):
                if (data is not None and
                        klass.attribute_map[attr] in data and
                        isinstance(data, (list, dict))):
                    value = data[klass.attribute_map[attr]]
                    kwargs[attr] = self.__deserialize(value, attr_type)

        instance = klass(**kwargs)

        if hasattr(instance, 'get_real_child_model'):
            klass_name = instance.get_real_child_model(data)
            if klass_name:
                instance = self.__deserialize(data, klass_name)
        return instance

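In essence, `__deserialize_model` looks each JSON key up in the model's `attribute_map` and feeds the matching values to the model constructor. A standalone sketch of that mapping step, using the `Collaborator` model added later in this commit and an invented payload:

from kaggle.models.collaborator import Collaborator

data = {'username': 'example-user', 'role': 'reader'}   # JSON-style payload
kwargs = {attr: data[json_key]
          for attr, json_key in Collaborator.attribute_map.items()
          if json_key in data}
collab = Collaborator(**kwargs)
print(collab.to_dict())   # {'username': 'example-user', 'role': 'reader'}
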
1095
.local/lib/python3.8/site-packages/kaggle/cli.py
Normal file
File diff suppressed because it is too large.

263
.local/lib/python3.8/site-packages/kaggle/configuration.py
Normal file
@@ -0,0 +1,263 @@
|
||||
#!/usr/bin/python
|
||||
#
|
||||
# Copyright 2021 Kaggle Inc
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Kaggle API
|
||||
|
||||
API for kaggle.com # noqa: E501
|
||||
|
||||
OpenAPI spec version: 1
|
||||
|
||||
Generated by: https://github.com/swagger-api/swagger-codegen.git
|
||||
"""
|
||||
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import copy
|
||||
import logging
|
||||
import multiprocessing
|
||||
import sys
|
||||
import urllib3
|
||||
|
||||
import six
|
||||
from six.moves import http_client as httplib
|
||||
|
||||
|
||||
class TypeWithDefault(type):
    def __init__(cls, name, bases, dct):
        super(TypeWithDefault, cls).__init__(name, bases, dct)
        cls._default = None

    def __call__(cls):
        if cls._default is None:
            cls._default = type.__call__(cls)
        return copy.copy(cls._default)

    def set_default(cls, default):
        cls._default = copy.copy(default)


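The metaclass above makes `Configuration()` hand out copies of one shared default instance, so per-request tweaks do not leak back while a process-wide default can still be installed; a brief sketch (the proxy URL is a placeholder):

from kaggle.configuration import Configuration

Configuration().verify_ssl = False      # mutates a throwaway copy only
print(Configuration().verify_ssl)       # True

custom = Configuration()
custom.proxy = 'http://proxy.example:3128'
Configuration.set_default(custom)       # replace the shared default
print(Configuration().proxy)            # 'http://proxy.example:3128'
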
class Configuration(six.with_metaclass(TypeWithDefault, object)):
|
||||
"""NOTE: This class is auto generated by the swagger code generator program.
|
||||
|
||||
Ref: https://github.com/swagger-api/swagger-codegen
|
||||
Do not edit the class manually.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
"""Constructor"""
|
||||
# Default Base url
|
||||
self.host = "https://www.kaggle.com/api/v1"
|
||||
# Temp file folder for downloading files
|
||||
self.temp_folder_path = None
|
||||
|
||||
# Authentication Settings
|
||||
# dict to store API key(s)
|
||||
self.api_key = {}
|
||||
# dict to store API prefix (e.g. Bearer)
|
||||
self.api_key_prefix = {}
|
||||
# Username for HTTP basic authentication
|
||||
self.username = ""
|
||||
# Password for HTTP basic authentication
|
||||
self.password = ""
|
||||
|
||||
# Logging Settings
|
||||
self.logger = {}
|
||||
self.logger["package_logger"] = logging.getLogger("kaggle")
|
||||
self.logger["urllib3_logger"] = logging.getLogger("urllib3")
|
||||
# Log format
|
||||
self.logger_format = '%(asctime)s %(levelname)s %(message)s'
|
||||
# Log stream handler
|
||||
self.logger_stream_handler = None
|
||||
# Log file handler
|
||||
self.logger_file_handler = None
|
||||
# Debug file location
|
||||
self.logger_file = None
|
||||
# Debug switch
|
||||
self.debug = False
|
||||
|
||||
# SSL/TLS verification
|
||||
# Set this to false to skip verifying SSL certificate when calling API
|
||||
# from https server.
|
||||
self.verify_ssl = True
|
||||
# Set this to customize the certificate file to verify the peer.
|
||||
self.ssl_ca_cert = None
|
||||
# client certificate file
|
||||
self.cert_file = None
|
||||
# client key file
|
||||
self.key_file = None
|
||||
# Set this to True/False to enable/disable SSL hostname verification.
|
||||
self.assert_hostname = None
|
||||
|
||||
# urllib3 connection pool's maximum number of connections saved
|
||||
# per pool. urllib3 uses 1 connection as default value, but this is
|
||||
# not the best value when you are making a lot of possibly parallel
|
||||
# requests to the same host, which is often the case here.
|
||||
# cpu_count * 5 is used as default value to increase performance.
|
||||
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
|
||||
|
||||
# Proxy URL
|
||||
self.proxy = None
|
||||
# Safe chars for path_param
|
||||
self.safe_chars_for_path_param = ''
|
||||
|
||||
@property
|
||||
def logger_file(self):
|
||||
"""The logger file.
|
||||
|
||||
If the logger_file is None, then add stream handler and remove file
|
||||
handler. Otherwise, add file handler and remove stream handler.
|
||||
|
||||
:param value: The logger_file path.
|
||||
:type: str
|
||||
"""
|
||||
return self.__logger_file
|
||||
|
||||
@logger_file.setter
|
||||
def logger_file(self, value):
|
||||
"""The logger file.
|
||||
|
||||
If the logger_file is None, then add stream handler and remove file
|
||||
handler. Otherwise, add file handler and remove stream handler.
|
||||
|
||||
:param value: The logger_file path.
|
||||
:type: str
|
||||
"""
|
||||
self.__logger_file = value
|
||||
if self.__logger_file:
|
||||
# If set logging file,
|
||||
# then add file handler and remove stream handler.
|
||||
self.logger_file_handler = logging.FileHandler(self.__logger_file)
|
||||
self.logger_file_handler.setFormatter(self.logger_formatter)
|
||||
for _, logger in six.iteritems(self.logger):
|
||||
logger.addHandler(self.logger_file_handler)
|
||||
if self.logger_stream_handler:
|
||||
logger.removeHandler(self.logger_stream_handler)
|
||||
else:
|
||||
# If not set logging file,
|
||||
# then add stream handler and remove file handler.
|
||||
self.logger_stream_handler = logging.StreamHandler()
|
||||
self.logger_stream_handler.setFormatter(self.logger_formatter)
|
||||
for _, logger in six.iteritems(self.logger):
|
||||
logger.addHandler(self.logger_stream_handler)
|
||||
if self.logger_file_handler:
|
||||
logger.removeHandler(self.logger_file_handler)
|
||||
|
||||
@property
|
||||
def debug(self):
|
||||
"""Debug status
|
||||
|
||||
:param value: The debug status, True or False.
|
||||
:type: bool
|
||||
"""
|
||||
return self.__debug
|
||||
|
||||
@debug.setter
|
||||
def debug(self, value):
|
||||
"""Debug status
|
||||
|
||||
:param value: The debug status, True or False.
|
||||
:type: bool
|
||||
"""
|
||||
self.__debug = value
|
||||
if self.__debug:
|
||||
# if debug status is True, turn on debug logging
|
||||
for _, logger in six.iteritems(self.logger):
|
||||
logger.setLevel(logging.DEBUG)
|
||||
# turn on httplib debug
|
||||
httplib.HTTPConnection.debuglevel = 1
|
||||
else:
|
||||
# if debug status is False, turn off debug logging,
|
||||
# setting log level to default `logging.WARNING`
|
||||
for _, logger in six.iteritems(self.logger):
|
||||
logger.setLevel(logging.WARNING)
|
||||
# turn off httplib debug
|
||||
httplib.HTTPConnection.debuglevel = 0
|
||||
|
||||
@property
|
||||
def logger_format(self):
|
||||
"""The logger format.
|
||||
|
||||
The logger_formatter will be updated when sets logger_format.
|
||||
|
||||
:param value: The format string.
|
||||
:type: str
|
||||
"""
|
||||
return self.__logger_format
|
||||
|
||||
@logger_format.setter
|
||||
def logger_format(self, value):
|
||||
"""The logger format.
|
||||
|
||||
The logger_formatter will be updated when sets logger_format.
|
||||
|
||||
:param value: The format string.
|
||||
:type: str
|
||||
"""
|
||||
self.__logger_format = value
|
||||
self.logger_formatter = logging.Formatter(self.__logger_format)
|
||||
|
||||
def get_api_key_with_prefix(self, identifier):
|
||||
"""Gets API key (with prefix if set).
|
||||
|
||||
:param identifier: The identifier of apiKey.
|
||||
:return: The token for api key authentication.
|
||||
"""
|
||||
if (self.api_key.get(identifier) and
|
||||
self.api_key_prefix.get(identifier)):
|
||||
return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier] # noqa: E501
|
||||
elif self.api_key.get(identifier):
|
||||
return self.api_key[identifier]
|
||||
|
||||
def get_basic_auth_token(self):
|
||||
"""Gets HTTP basic authentication header (string).
|
||||
|
||||
:return: The token for basic HTTP authentication.
|
||||
"""
|
||||
return urllib3.util.make_headers(
|
||||
basic_auth=self.username + ':' + self.password
|
||||
).get('authorization')
|
||||
|
||||
def auth_settings(self):
|
||||
"""Gets Auth Settings dict for api client.
|
||||
|
||||
:return: The Auth Settings information dict.
|
||||
"""
|
||||
return {
|
||||
'basicAuth':
|
||||
{
|
||||
'type': 'basic',
|
||||
'in': 'header',
|
||||
'key': 'Authorization',
|
||||
'value': self.get_basic_auth_token()
|
||||
},
|
||||
|
||||
}
|
||||
|
||||
def to_debug_report(self):
|
||||
"""Gets the essential information for debugging.
|
||||
|
||||
:return: The report for debugging.
|
||||
"""
|
||||
return "Python SDK Debug Report:\n"\
|
||||
"OS: {env}\n"\
|
||||
"Python Version: {pyversion}\n"\
|
||||
"Version of the API: 1\n"\
|
||||
"SDK Package Version: 1".\
|
||||
format(env=sys.platform, pyversion=sys.version)
|
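Putting the settings above together, a hedged example of configuring the client by hand; the Kaggle CLI normally fills the credentials from ~/.kaggle/kaggle.json, and every value below is a placeholder:

from kaggle.configuration import Configuration

config = Configuration()
config.username = 'example-user'
config.password = 'example-api-key'
config.proxy = 'http://proxy.example:3128'
config.logger_file = '/tmp/kaggle-api.log'   # switches logging to a file handler
config.debug = True                          # DEBUG level plus httplib wire logging

print(config.get_basic_auth_token())         # 'Basic <base64 of user:key>'
print(config.to_debug_report())
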
43
.local/lib/python3.8/site-packages/kaggle/models/__init__.py
Normal file
@@ -0,0 +1,43 @@
|
||||
#!/usr/bin/python
|
||||
#
|
||||
# Copyright 2021 Kaggle Inc
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# coding: utf-8
|
||||
|
||||
# flake8: noqa
|
||||
"""
|
||||
Kaggle API
|
||||
|
||||
API for kaggle.com # noqa: E501
|
||||
|
||||
OpenAPI spec version: 1
|
||||
|
||||
Generated by: https://github.com/swagger-api/swagger-codegen.git
|
||||
"""
|
||||
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
# import models into model package
|
||||
from kaggle.models.collaborator import Collaborator
|
||||
from kaggle.models.dataset_column import DatasetColumn
|
||||
from kaggle.models.dataset_new_request import DatasetNewRequest
|
||||
from kaggle.models.dataset_new_version_request import DatasetNewVersionRequest
|
||||
from kaggle.models.dataset_update_settings_request import DatasetUpdateSettingsRequest
|
||||
from kaggle.models.dataset_upload_file import DatasetUploadFile
|
||||
from kaggle.models.error import Error
|
||||
from kaggle.models.kernel_push_request import KernelPushRequest
|
||||
from kaggle.models.license import License
|
||||
from kaggle.models.result import Result
|
Binary files not shown (12 files).

166
.local/lib/python3.8/site-packages/kaggle/models/collaborator.py
Normal file
@@ -0,0 +1,166 @@
|
||||
#!/usr/bin/python
|
||||
#
|
||||
# Copyright 2021 Kaggle Inc
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Kaggle API
|
||||
|
||||
API for kaggle.com # noqa: E501
|
||||
|
||||
OpenAPI spec version: 1
|
||||
|
||||
Generated by: https://github.com/swagger-api/swagger-codegen.git
|
||||
"""
|
||||
|
||||
|
||||
import pprint
|
||||
import re # noqa: F401
|
||||
|
||||
import six
|
||||
|
||||
|
||||
class Collaborator(object):
|
||||
"""NOTE: This class is auto generated by the swagger code generator program.
|
||||
|
||||
Do not edit the class manually.
|
||||
"""
|
||||
|
||||
"""
|
||||
Attributes:
|
||||
swagger_types (dict): The key is attribute name
|
||||
and the value is attribute type.
|
||||
attribute_map (dict): The key is attribute name
|
||||
and the value is json key in definition.
|
||||
"""
|
||||
swagger_types = {
|
||||
'username': 'str',
|
||||
'role': 'str'
|
||||
}
|
||||
|
||||
attribute_map = {
|
||||
'username': 'username',
|
||||
'role': 'role'
|
||||
}
|
||||
|
||||
def __init__(self, username=None, role=None): # noqa: E501
|
||||
"""Collaborator - a model defined in Swagger""" # noqa: E501
|
||||
|
||||
self._username = None
|
||||
self._role = None
|
||||
self.discriminator = None
|
||||
|
||||
self.username = username
|
||||
self.role = role
|
||||
|
||||
@property
|
||||
def username(self):
|
||||
"""Gets the username of this Collaborator. # noqa: E501
|
||||
|
||||
Username of the collaborator # noqa: E501
|
||||
|
||||
:return: The username of this Collaborator. # noqa: E501
|
||||
:rtype: str
|
||||
"""
|
||||
return self._username
|
||||
|
||||
@username.setter
|
||||
def username(self, username):
|
||||
"""Sets the username of this Collaborator.
|
||||
|
||||
Username of the collaborator # noqa: E501
|
||||
|
||||
:param username: The username of this Collaborator. # noqa: E501
|
||||
:type: str
|
||||
"""
|
||||
if username is None:
|
||||
raise ValueError("Invalid value for `username`, must not be `None`") # noqa: E501
|
||||
|
||||
self._username = username
|
||||
|
||||
@property
|
||||
def role(self):
|
||||
"""Gets the role of this Collaborator. # noqa: E501
|
||||
|
||||
Role of the collaborator # noqa: E501
|
||||
|
||||
:return: The role of this Collaborator. # noqa: E501
|
||||
:rtype: str
|
||||
"""
|
||||
return self._role
|
||||
|
||||
@role.setter
|
||||
def role(self, role):
|
||||
"""Sets the role of this Collaborator.
|
||||
|
||||
Role of the collaborator # noqa: E501
|
||||
|
||||
:param role: The role of this Collaborator. # noqa: E501
|
||||
:type: str
|
||||
"""
|
||||
if role is None:
|
||||
raise ValueError("Invalid value for `role`, must not be `None`") # noqa: E501
|
||||
allowed_values = ["reader", "writer"] # noqa: E501
|
||||
if role not in allowed_values:
|
||||
raise ValueError(
|
||||
"Invalid value for `role` ({0}), must be one of {1}" # noqa: E501
|
||||
.format(role, allowed_values)
|
||||
)
|
||||
|
||||
self._role = role
|
||||
|
||||
def to_dict(self):
|
||||
"""Returns the model properties as a dict"""
|
||||
result = {}
|
||||
|
||||
for attr, _ in six.iteritems(self.swagger_types):
|
||||
value = getattr(self, attr)
|
||||
if isinstance(value, list):
|
||||
result[attr] = list(map(
|
||||
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
|
||||
value
|
||||
))
|
||||
elif hasattr(value, "to_dict"):
|
||||
result[attr] = value.to_dict()
|
||||
elif isinstance(value, dict):
|
||||
result[attr] = dict(map(
|
||||
lambda item: (item[0], item[1].to_dict())
|
||||
if hasattr(item[1], "to_dict") else item,
|
||||
value.items()
|
||||
))
|
||||
else:
|
||||
result[attr] = value
|
||||
|
||||
return result
|
||||
|
||||
def to_str(self):
|
||||
"""Returns the string representation of the model"""
|
||||
return pprint.pformat(self.to_dict())
|
||||
|
||||
def __repr__(self):
|
||||
"""For `print` and `pprint`"""
|
||||
return self.to_str()
|
||||
|
||||
def __eq__(self, other):
|
||||
"""Returns true if both objects are equal"""
|
||||
if not isinstance(other, Collaborator):
|
||||
return False
|
||||
|
||||
return self.__dict__ == other.__dict__
|
||||
|
||||
def __ne__(self, other):
|
||||
"""Returns true if both objects are not equal"""
|
||||
return not self == other
|
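A short usage sketch of the model above; the username is a placeholder, and the invalid role shows the setter-side validation:

from kaggle.models.collaborator import Collaborator

collab = Collaborator(username='example-user', role='reader')
print(collab.to_dict())   # {'username': 'example-user', 'role': 'reader'}

try:
    collab.role = 'owner'                 # not in ["reader", "writer"]
except ValueError as err:
    print(err)
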
242
.local/lib/python3.8/site-packages/kaggle/models/dataset_column.py
Normal file
@@ -0,0 +1,242 @@
|
||||
#!/usr/bin/python
|
||||
#
|
||||
# Copyright 2021 Kaggle Inc
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Kaggle API
|
||||
|
||||
API for kaggle.com # noqa: E501
|
||||
|
||||
OpenAPI spec version: 1
|
||||
|
||||
Generated by: https://github.com/swagger-api/swagger-codegen.git
|
||||
"""
|
||||
|
||||
|
||||
import pprint
|
||||
import re # noqa: F401
|
||||
|
||||
import six
|
||||
|
||||
|
||||
class DatasetColumn(object):
|
||||
"""NOTE: This class is auto generated by the swagger code generator program.
|
||||
|
||||
Do not edit the class manually.
|
||||
"""
|
||||
|
||||
"""
|
||||
Attributes:
|
||||
swagger_types (dict): The key is attribute name
|
||||
and the value is attribute type.
|
||||
attribute_map (dict): The key is attribute name
|
||||
and the value is json key in definition.
|
||||
"""
|
||||
swagger_types = {
|
||||
'order': 'float',
|
||||
'name': 'str',
|
||||
'type': 'str',
|
||||
'original_type': 'str',
|
||||
'description': 'str'
|
||||
}
|
||||
|
||||
attribute_map = {
|
||||
'order': 'order',
|
||||
'name': 'name',
|
||||
'type': 'type',
|
||||
'original_type': 'originalType',
|
||||
'description': 'description'
|
||||
}
|
||||
|
||||
def __init__(self, order=None, name=None, type=None, original_type=None, description=None): # noqa: E501
|
||||
"""DatasetColumn - a model defined in Swagger""" # noqa: E501
|
||||
|
||||
self._order = None
|
||||
self._name = None
|
||||
self._type = None
|
||||
self._original_type = None
|
||||
self._description = None
|
||||
self.discriminator = None
|
||||
|
||||
if order is not None:
|
||||
self.order = order
|
||||
if name is not None:
|
||||
self.name = name
|
||||
if type is not None:
|
||||
self.type = type
|
||||
if original_type is not None:
|
||||
self.original_type = original_type
|
||||
if description is not None:
|
||||
self.description = description
|
||||
|
||||
@property
|
||||
def order(self):
|
||||
"""Gets the order of this DatasetColumn. # noqa: E501
|
||||
|
||||
The order that the column comes in, 0-based. (The first column is 0, second is 1, etc.) # noqa: E501
|
||||
|
||||
:return: The order of this DatasetColumn. # noqa: E501
|
||||
:rtype: float
|
||||
"""
|
||||
return self._order
|
||||
|
||||
@order.setter
|
||||
def order(self, order):
|
||||
"""Sets the order of this DatasetColumn.
|
||||
|
||||
The order that the column comes in, 0-based. (The first column is 0, second is 1, etc.) # noqa: E501
|
||||
|
||||
:param order: The order of this DatasetColumn. # noqa: E501
|
||||
:type: float
|
||||
"""
|
||||
|
||||
self._order = order
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Gets the name of this DatasetColumn. # noqa: E501
|
||||
|
||||
The column name # noqa: E501
|
||||
|
||||
:return: The name of this DatasetColumn. # noqa: E501
|
||||
:rtype: str
|
||||
"""
|
||||
return self._name
|
||||
|
||||
@name.setter
|
||||
def name(self, name):
|
||||
"""Sets the name of this DatasetColumn.
|
||||
|
||||
The column name # noqa: E501
|
||||
|
||||
:param name: The name of this DatasetColumn. # noqa: E501
|
||||
:type: str
|
||||
"""
|
||||
|
||||
self._name = name
|
||||
|
||||
@property
|
||||
def type(self):
|
||||
"""Gets the type of this DatasetColumn. # noqa: E501
|
||||
|
||||
The type of all of the fields in the column. Please see the data types on https://github.com/Kaggle/kaggle-api/wiki/Dataset-Metadata # noqa: E501
|
||||
|
||||
:return: The type of this DatasetColumn. # noqa: E501
|
||||
:rtype: str
|
||||
"""
|
||||
return self._type
|
||||
|
||||
@type.setter
|
||||
def type(self, type):
|
||||
"""Sets the type of this DatasetColumn.
|
||||
|
||||
The type of all of the fields in the column. Please see the data types on https://github.com/Kaggle/kaggle-api/wiki/Dataset-Metadata # noqa: E501
|
||||
|
||||
:param type: The type of this DatasetColumn. # noqa: E501
|
||||
:type: str
|
||||
"""
|
||||
|
||||
self._type = type
|
||||
|
||||
@property
|
||||
def original_type(self):
|
||||
"""Gets the original_type of this DatasetColumn. # noqa: E501
|
||||
|
||||
Used to store the original type of the column, which will be converted to Kaggle's types. For example, an `originalType` of `\"integer\"` would convert to a `type` of `\"numeric\"` # noqa: E501
|
||||
|
||||
:return: The original_type of this DatasetColumn. # noqa: E501
|
||||
:rtype: str
|
||||
"""
|
||||
return self._original_type
|
||||
|
||||
@original_type.setter
|
||||
def original_type(self, original_type):
|
||||
"""Sets the original_type of this DatasetColumn.
|
||||
|
||||
Used to store the original type of the column, which will be converted to Kaggle's types. For example, an `originalType` of `\"integer\"` would convert to a `type` of `\"numeric\"` # noqa: E501
|
||||
|
||||
:param original_type: The original_type of this DatasetColumn. # noqa: E501
|
||||
:type: str
|
||||
"""
|
||||
|
||||
self._original_type = original_type
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
"""Gets the description of this DatasetColumn. # noqa: E501
|
||||
|
||||
The description of the column # noqa: E501
|
||||
|
||||
:return: The description of this DatasetColumn. # noqa: E501
|
||||
:rtype: str
|
||||
"""
|
||||
return self._description
|
||||
|
||||
@description.setter
|
||||
def description(self, description):
|
||||
"""Sets the description of this DatasetColumn.
|
||||
|
||||
The description of the column # noqa: E501
|
||||
|
||||
:param description: The description of this DatasetColumn. # noqa: E501
|
||||
:type: str
|
||||
"""
|
||||
|
||||
self._description = description
|
||||
|
||||
def to_dict(self):
|
||||
"""Returns the model properties as a dict"""
|
||||
result = {}
|
||||
|
||||
for attr, _ in six.iteritems(self.swagger_types):
|
||||
value = getattr(self, attr)
|
||||
if isinstance(value, list):
|
||||
result[attr] = list(map(
|
||||
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
|
||||
value
|
||||
))
|
||||
elif hasattr(value, "to_dict"):
|
||||
result[attr] = value.to_dict()
|
||||
elif isinstance(value, dict):
|
||||
result[attr] = dict(map(
|
||||
lambda item: (item[0], item[1].to_dict())
|
||||
if hasattr(item[1], "to_dict") else item,
|
||||
value.items()
|
||||
))
|
||||
else:
|
||||
result[attr] = value
|
||||
|
||||
return result
|
||||
|
||||
def to_str(self):
|
||||
"""Returns the string representation of the model"""
|
||||
return pprint.pformat(self.to_dict())
|
||||
|
||||
def __repr__(self):
|
||||
"""For `print` and `pprint`"""
|
||||
return self.to_str()
|
||||
|
||||
def __eq__(self, other):
|
||||
"""Returns true if both objects are equal"""
|
||||
if not isinstance(other, DatasetColumn):
|
||||
return False
|
||||
|
||||
return self.__dict__ == other.__dict__
|
||||
|
||||
def __ne__(self, other):
|
||||
"""Returns true if both objects are not equal"""
|
||||
return not self == other
|
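A usage sketch of the model above with invented column metadata; note that `to_dict()` uses the Python attribute names, while the JSON payload sent to the API would use the camelCase keys from `attribute_map` (e.g. `originalType`):

from kaggle.models.dataset_column import DatasetColumn

col = DatasetColumn(order=0, name='pickup_datetime',
                    type='datetime', original_type='timestamp',
                    description='When the trip started')
print(col.to_dict())
# {'order': 0, 'name': 'pickup_datetime', 'type': 'datetime',
#  'original_type': 'timestamp', 'description': 'When the trip started'}
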
Some files were not shown because too many files have changed in this diff.