summaryrefslogtreecommitdiffhomepage
diff options
context:
space:
mode:
authormorpheus65535 <[email protected]>2022-09-22 06:33:33 -0400
committermorpheus65535 <[email protected]>2022-09-22 06:33:33 -0400
commita338de147e8a4d74ca266b1306997fcfc90b8941 (patch)
tree6970a11c1c6126677f00e153107c2812a37f069a
parent131b4e5cde4034f78923d7eaebd49b3550f8aa13 (diff)
downloadbazarr-a338de147e8a4d74ca266b1306997fcfc90b8941.tar.gz
bazarr-a338de147e8a4d74ca266b1306997fcfc90b8941.zip
Fixed import error after last commit.v1.1.2-beta.8
-rw-r--r--libs/aniso8601/__init__.py26
-rw-r--r--libs/aniso8601/builders/__init__.py614
-rw-r--r--libs/aniso8601/builders/python.py705
-rw-r--r--libs/aniso8601/builders/tests/__init__.py7
-rw-r--r--libs/aniso8601/builders/tests/test_init.py838
-rw-r--r--libs/aniso8601/builders/tests/test_python.py1710
-rw-r--r--libs/aniso8601/compat.py24
-rw-r--r--libs/aniso8601/date.py161
-rw-r--r--libs/aniso8601/decimalfraction.py12
-rw-r--r--libs/aniso8601/duration.py291
-rw-r--r--libs/aniso8601/exceptions.py51
-rw-r--r--libs/aniso8601/interval.py350
-rw-r--r--libs/aniso8601/resolution.py27
-rw-r--r--libs/aniso8601/tests/__init__.py7
-rw-r--r--libs/aniso8601/tests/compat.py16
-rw-r--r--libs/aniso8601/tests/test_compat.py27
-rw-r--r--libs/aniso8601/tests/test_date.py303
-rw-r--r--libs/aniso8601/tests/test_decimalfraction.py19
-rw-r--r--libs/aniso8601/tests/test_duration.py1402
-rw-r--r--libs/aniso8601/tests/test_init.py49
-rw-r--r--libs/aniso8601/tests/test_interval.py1675
-rw-r--r--libs/aniso8601/tests/test_time.py539
-rw-r--r--libs/aniso8601/tests/test_timezone.py123
-rw-r--r--libs/aniso8601/tests/test_utcoffset.py56
-rw-r--r--libs/aniso8601/time.py203
-rw-r--r--libs/aniso8601/timezone.py62
-rw-r--r--libs/aniso8601/utcoffset.py71
-rw-r--r--libs/attr/__init__.py79
-rw-r--r--libs/attr/__init__.pyi486
-rw-r--r--libs/attr/_cmp.py155
-rw-r--r--libs/attr/_cmp.pyi13
-rw-r--r--libs/attr/_compat.py185
-rw-r--r--libs/attr/_config.py31
-rw-r--r--libs/attr/_funcs.py420
-rw-r--r--libs/attr/_make.py3006
-rw-r--r--libs/attr/_next_gen.py220
-rw-r--r--libs/attr/_version_info.py86
-rw-r--r--libs/attr/_version_info.pyi9
-rw-r--r--libs/attr/converters.py144
-rw-r--r--libs/attr/converters.pyi13
-rw-r--r--libs/attr/exceptions.py92
-rw-r--r--libs/attr/exceptions.pyi17
-rw-r--r--libs/attr/filters.py51
-rw-r--r--libs/attr/filters.pyi6
-rw-r--r--libs/attr/py.typed0
-rw-r--r--libs/attr/setters.py73
-rw-r--r--libs/attr/setters.pyi19
-rw-r--r--libs/attr/validators.py594
-rw-r--r--libs/attr/validators.pyi80
-rw-r--r--libs/attrs/__init__.py70
-rw-r--r--libs/attrs/__init__.pyi66
-rw-r--r--libs/attrs/converters.py3
-rw-r--r--libs/attrs/exceptions.py3
-rw-r--r--libs/attrs/filters.py3
-rw-r--r--libs/attrs/py.typed0
-rw-r--r--libs/attrs/setters.py3
-rw-r--r--libs/attrs/validators.py3
-rw-r--r--libs/version.txt2
58 files changed, 15300 insertions, 0 deletions
diff --git a/libs/aniso8601/__init__.py b/libs/aniso8601/__init__.py
new file mode 100644
index 000000000..033d30b9d
--- /dev/null
+++ b/libs/aniso8601/__init__.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+from aniso8601.date import get_date_resolution, parse_date
+from aniso8601.duration import get_duration_resolution, parse_duration
+from aniso8601.interval import (
+ get_interval_resolution,
+ get_repeating_interval_resolution,
+ parse_interval,
+ parse_repeating_interval,
+)
+
+# Import the main parsing functions so they are readily available
+from aniso8601.time import (
+ get_datetime_resolution,
+ get_time_resolution,
+ parse_datetime,
+ parse_time,
+)
+
+# Package version string; matches the upstream aniso8601 release being vendored
+__version__ = "9.0.1"
diff --git a/libs/aniso8601/builders/__init__.py b/libs/aniso8601/builders/__init__.py
new file mode 100644
index 000000000..834c72a6b
--- /dev/null
+++ b/libs/aniso8601/builders/__init__.py
@@ -0,0 +1,614 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+import calendar
+from collections import namedtuple
+
+from aniso8601.exceptions import (
+ DayOutOfBoundsError,
+ HoursOutOfBoundsError,
+ ISOFormatError,
+ LeapSecondError,
+ MidnightBoundsError,
+ MinutesOutOfBoundsError,
+ MonthOutOfBoundsError,
+ SecondsOutOfBoundsError,
+ WeekOutOfBoundsError,
+ YearOutOfBoundsError,
+)
+
+# Intermediate parse results: each tuple mirrors the components of one ISO 8601
+# construct, using the specification's field notation (YYYY, MM, Www, PnY, ...).
+# Fields hold component *strings* (or None) until a builder range-checks and
+# casts them.
+DateTuple = namedtuple("Date", ["YYYY", "MM", "DD", "Www", "D", "DDD"])
+TimeTuple = namedtuple("Time", ["hh", "mm", "ss", "tz"])
+DatetimeTuple = namedtuple("Datetime", ["date", "time"])
+DurationTuple = namedtuple(
+    "Duration", ["PnY", "PnM", "PnW", "PnD", "TnH", "TnM", "TnS"]
+)
+IntervalTuple = namedtuple("Interval", ["start", "end", "duration"])
+RepeatingIntervalTuple = namedtuple("RepeatingInterval", ["R", "Rnn", "interval"])
+TimezoneTuple = namedtuple("Timezone", ["negative", "Z", "hh", "mm", "name"])
+
+# Describes how to validate one component: the error message used on a failed
+# cast, inclusive min/max bounds (None disables a bound), the exception type
+# and message raised on a range failure, and the function that performs the
+# cast + range check (e.g. range_check below).
+Limit = namedtuple(
+    "Limit",
+    [
+        "casterrorstring",
+        "min",
+        "max",
+        "rangeexception",
+        "rangeerrorstring",
+        "rangefunc",
+    ],
+)
+
+
+def cast(
+    value,
+    castfunction,
+    caughtexceptions=(ValueError,),
+    thrownexception=ISOFormatError,
+    thrownmessage=None,
+):
+    """Apply *castfunction* to *value*, translating any exception in
+    *caughtexceptions* into *thrownexception(thrownmessage)*.
+
+    Returns the cast result on success.
+    """
+    try:
+        result = castfunction(value)
+    except caughtexceptions:
+        # NOTE(review): the original exception is not chained (`from e`),
+        # so the underlying cast failure is hidden from tracebacks —
+        # presumably intentional to keep parse errors uniform; confirm.
+        raise thrownexception(thrownmessage)
+
+    return result
+
+
+def range_check(valuestr, limit):
+    """Cast *valuestr* (int, or float when it contains a '.') and verify it
+    lies within *limit*'s inclusive [min, max] bounds.
+
+    Returns the cast value, or None when *valuestr* is None.  Raises
+    limit.rangeexception on a bounds failure; the cast itself raises via
+    cast() with limit.casterrorstring.
+    """
+    # Returns cast value if in range, raises defined exceptions on failure
+    if valuestr is None:
+        return None
+
+    if "." in valuestr:
+        castfunc = float
+    else:
+        castfunc = int
+
+    value = cast(valuestr, castfunc, thrownmessage=limit.casterrorstring)
+
+    if limit.min is not None and value < limit.min:
+        raise limit.rangeexception(limit.rangeerrorstring)
+
+    if limit.max is not None and value > limit.max:
+        raise limit.rangeexception(limit.rangeerrorstring)
+
+    return value
+
+
+class BaseTimeBuilder(object):
+    """Abstract builder for parsed ISO 8601 components.
+
+    Provides the default per-component Limit definitions and the
+    range_check_* helpers that cast and validate component strings; the
+    build_* methods are abstract and are implemented by concrete
+    subclasses (e.g. TupleBuilder below) to produce actual objects.
+    """
+
+    # Limit tuple format cast function, cast error string,
+    # lower limit, upper limit, limit error string
+    DATE_YYYY_LIMIT = Limit(
+        "Invalid year string.",
+        0000,
+        9999,
+        YearOutOfBoundsError,
+        "Year must be between 1..9999.",
+        range_check,
+    )
+    DATE_MM_LIMIT = Limit(
+        "Invalid month string.",
+        1,
+        12,
+        MonthOutOfBoundsError,
+        "Month must be between 1..12.",
+        range_check,
+    )
+    DATE_DD_LIMIT = Limit(
+        "Invalid day string.",
+        1,
+        31,
+        DayOutOfBoundsError,
+        "Day must be between 1..31.",
+        range_check,
+    )
+    DATE_WWW_LIMIT = Limit(
+        "Invalid week string.",
+        1,
+        53,
+        WeekOutOfBoundsError,
+        "Week number must be between 1..53.",
+        range_check,
+    )
+    DATE_D_LIMIT = Limit(
+        "Invalid weekday string.",
+        1,
+        7,
+        DayOutOfBoundsError,
+        "Weekday number must be between 1..7.",
+        range_check,
+    )
+    DATE_DDD_LIMIT = Limit(
+        "Invalid ordinal day string.",
+        1,
+        366,
+        DayOutOfBoundsError,
+        "Ordinal day must be between 1..366.",
+        range_check,
+    )
+    TIME_HH_LIMIT = Limit(
+        "Invalid hour string.",
+        0,
+        24,
+        HoursOutOfBoundsError,
+        "Hour must be between 0..24 with " "24 representing midnight.",
+        range_check,
+    )
+    TIME_MM_LIMIT = Limit(
+        "Invalid minute string.",
+        0,
+        59,
+        MinutesOutOfBoundsError,
+        "Minute must be between 0..59.",
+        range_check,
+    )
+    TIME_SS_LIMIT = Limit(
+        "Invalid second string.",
+        0,
+        60,
+        SecondsOutOfBoundsError,
+        "Second must be between 0..60 with " "60 representing a leap second.",
+        range_check,
+    )
+    TZ_HH_LIMIT = Limit(
+        "Invalid timezone hour string.",
+        0,
+        23,
+        HoursOutOfBoundsError,
+        "Hour must be between 0..23.",
+        range_check,
+    )
+    TZ_MM_LIMIT = Limit(
+        "Invalid timezone minute string.",
+        0,
+        59,
+        MinutesOutOfBoundsError,
+        "Minute must be between 0..59.",
+        range_check,
+    )
+    DURATION_PNY_LIMIT = Limit(
+        "Invalid year duration string.",
+        0,
+        None,
+        ISOFormatError,
+        "Duration years component must be positive.",
+        range_check,
+    )
+    DURATION_PNM_LIMIT = Limit(
+        "Invalid month duration string.",
+        0,
+        None,
+        ISOFormatError,
+        "Duration months component must be positive.",
+        range_check,
+    )
+    DURATION_PNW_LIMIT = Limit(
+        "Invalid week duration string.",
+        0,
+        None,
+        ISOFormatError,
+        "Duration weeks component must be positive.",
+        range_check,
+    )
+    DURATION_PND_LIMIT = Limit(
+        "Invalid day duration string.",
+        0,
+        None,
+        ISOFormatError,
+        "Duration days component must be positive.",
+        range_check,
+    )
+    DURATION_TNH_LIMIT = Limit(
+        "Invalid hour duration string.",
+        0,
+        None,
+        ISOFormatError,
+        "Duration hours component must be positive.",
+        range_check,
+    )
+    DURATION_TNM_LIMIT = Limit(
+        "Invalid minute duration string.",
+        0,
+        None,
+        ISOFormatError,
+        "Duration minutes component must be positive.",
+        range_check,
+    )
+    DURATION_TNS_LIMIT = Limit(
+        "Invalid second duration string.",
+        0,
+        None,
+        ISOFormatError,
+        "Duration seconds component must be positive.",
+        range_check,
+    )
+    INTERVAL_RNN_LIMIT = Limit(
+        "Invalid duration repetition string.",
+        0,
+        None,
+        ISOFormatError,
+        "Duration repetition count must be positive.",
+        range_check,
+    )
+
+    # Component-name -> Limit maps consumed by the range_check_* methods;
+    # subclasses may override individual entries.
+    DATE_RANGE_DICT = {
+        "YYYY": DATE_YYYY_LIMIT,
+        "MM": DATE_MM_LIMIT,
+        "DD": DATE_DD_LIMIT,
+        "Www": DATE_WWW_LIMIT,
+        "D": DATE_D_LIMIT,
+        "DDD": DATE_DDD_LIMIT,
+    }
+
+    TIME_RANGE_DICT = {"hh": TIME_HH_LIMIT, "mm": TIME_MM_LIMIT, "ss": TIME_SS_LIMIT}
+
+    DURATION_RANGE_DICT = {
+        "PnY": DURATION_PNY_LIMIT,
+        "PnM": DURATION_PNM_LIMIT,
+        "PnW": DURATION_PNW_LIMIT,
+        "PnD": DURATION_PND_LIMIT,
+        "TnH": DURATION_TNH_LIMIT,
+        "TnM": DURATION_TNM_LIMIT,
+        "TnS": DURATION_TNS_LIMIT,
+    }
+
+    REPEATING_INTERVAL_RANGE_DICT = {"Rnn": INTERVAL_RNN_LIMIT}
+
+    TIMEZONE_RANGE_DICT = {"hh": TZ_HH_LIMIT, "mm": TZ_MM_LIMIT}
+
+    # When False, hh=23 mm=59 ss=60 raises LeapSecondError in range_check_time
+    LEAP_SECONDS_SUPPORTED = False
+
+    # --- Abstract construction methods: subclasses turn (range checked)
+    # --- component values into concrete objects.
+    @classmethod
+    def build_date(cls, YYYY=None, MM=None, DD=None, Www=None, D=None, DDD=None):
+        raise NotImplementedError
+
+    @classmethod
+    def build_time(cls, hh=None, mm=None, ss=None, tz=None):
+        raise NotImplementedError
+
+    @classmethod
+    def build_datetime(cls, date, time):
+        raise NotImplementedError
+
+    @classmethod
+    def build_duration(
+        cls, PnY=None, PnM=None, PnW=None, PnD=None, TnH=None, TnM=None, TnS=None
+    ):
+        raise NotImplementedError
+
+    @classmethod
+    def build_interval(cls, start=None, end=None, duration=None):
+        # start, end, and duration are all tuples
+        raise NotImplementedError
+
+    @classmethod
+    def build_repeating_interval(cls, R=None, Rnn=None, interval=None):
+        # interval is a tuple
+        raise NotImplementedError
+
+    @classmethod
+    def build_timezone(cls, negative=None, Z=None, hh=None, mm=None, name=""):
+        raise NotImplementedError
+
+    @classmethod
+    def range_check_date(
+        cls, YYYY=None, MM=None, DD=None, Www=None, D=None, DDD=None, rangedict=None
+    ):
+        """Cast and range check date component strings.
+
+        Returns (YYYY, MM, DD, Www, D, DDD) as cast values.  Beyond the
+        per-component Limits, DD is validated against the actual month
+        length via calendar.monthrange, and ordinal day 366 is rejected
+        for non-leap years.  Raises DayOutOfBoundsError (or the relevant
+        Limit's exception) on failure.
+        """
+        if rangedict is None:
+            rangedict = cls.DATE_RANGE_DICT
+
+        if "YYYY" in rangedict:
+            YYYY = rangedict["YYYY"].rangefunc(YYYY, rangedict["YYYY"])
+
+        if "MM" in rangedict:
+            MM = rangedict["MM"].rangefunc(MM, rangedict["MM"])
+
+        if "DD" in rangedict:
+            DD = rangedict["DD"].rangefunc(DD, rangedict["DD"])
+
+        if "Www" in rangedict:
+            Www = rangedict["Www"].rangefunc(Www, rangedict["Www"])
+
+        if "D" in rangedict:
+            D = rangedict["D"].rangefunc(D, rangedict["D"])
+
+        if "DDD" in rangedict:
+            DDD = rangedict["DDD"].rangefunc(DDD, rangedict["DDD"])
+
+        if DD is not None:
+            # Check calendar
+            if DD > calendar.monthrange(YYYY, MM)[1]:
+                raise DayOutOfBoundsError(
+                    "{0} is out of range for {1}-{2}".format(DD, YYYY, MM)
+                )
+
+        if DDD is not None:
+            if calendar.isleap(YYYY) is False and DDD == 366:
+                raise DayOutOfBoundsError(
+                    "{0} is only valid for leap year.".format(DDD)
+                )
+
+        return (YYYY, MM, DD, Www, D, DDD)
+
+    @classmethod
+    def range_check_time(cls, hh=None, mm=None, ss=None, tz=None, rangedict=None):
+        """Cast and range check time components.
+
+        Returns (hh, mm, ss, tz).  Special cases: hour 24 is allowed only
+        as exact midnight (mm/ss must be 0 or absent); second 60 (a leap
+        second) is allowed only at 23:59 and only when the builder sets
+        LEAP_SECONDS_SUPPORTED, otherwise LeapSecondError /
+        SecondsOutOfBoundsError is raised.
+        """
+        # Used for midnight and leap second handling
+        midnight = False  # Handle hh = '24' specially
+
+        if rangedict is None:
+            rangedict = cls.TIME_RANGE_DICT
+
+        if "hh" in rangedict:
+            try:
+                hh = rangedict["hh"].rangefunc(hh, rangedict["hh"])
+            except HoursOutOfBoundsError as e:
+                # Hours in (24, 25) get the more specific midnight message
+                if float(hh) > 24 and float(hh) < 25:
+                    raise MidnightBoundsError("Hour 24 may only represent midnight.")
+
+                raise e
+
+        if "mm" in rangedict:
+            mm = rangedict["mm"].rangefunc(mm, rangedict["mm"])
+
+        if "ss" in rangedict:
+            ss = rangedict["ss"].rangefunc(ss, rangedict["ss"])
+
+        if hh is not None and hh == 24:
+            midnight = True
+
+        # Handle midnight range
+        if midnight is True and (
+            (mm is not None and mm != 0) or (ss is not None and ss != 0)
+        ):
+            raise MidnightBoundsError("Hour 24 may only represent midnight.")
+
+        if cls.LEAP_SECONDS_SUPPORTED is True:
+            if hh != 23 and mm != 59 and ss == 60:
+                raise cls.TIME_SS_LIMIT.rangeexception(
+                    cls.TIME_SS_LIMIT.rangeerrorstring
+                )
+        else:
+            if hh == 23 and mm == 59 and ss == 60:
+                # https://bitbucket.org/nielsenb/aniso8601/issues/10/sub-microsecond-precision-in-durations-is
+                raise LeapSecondError("Leap seconds are not supported.")
+
+            if ss == 60:
+                raise cls.TIME_SS_LIMIT.rangeexception(
+                    cls.TIME_SS_LIMIT.rangeerrorstring
+                )
+
+        return (hh, mm, ss, tz)
+
+    @classmethod
+    def range_check_duration(
+        cls,
+        PnY=None,
+        PnM=None,
+        PnW=None,
+        PnD=None,
+        TnH=None,
+        TnM=None,
+        TnS=None,
+        rangedict=None,
+    ):
+        """Cast and range check each duration component against its Limit.
+
+        Returns (PnY, PnM, PnW, PnD, TnH, TnM, TnS) as cast values; no
+        cross-component validation is performed here.
+        """
+        if rangedict is None:
+            rangedict = cls.DURATION_RANGE_DICT
+
+        if "PnY" in rangedict:
+            PnY = rangedict["PnY"].rangefunc(PnY, rangedict["PnY"])
+
+        if "PnM" in rangedict:
+            PnM = rangedict["PnM"].rangefunc(PnM, rangedict["PnM"])
+
+        if "PnW" in rangedict:
+            PnW = rangedict["PnW"].rangefunc(PnW, rangedict["PnW"])
+
+        if "PnD" in rangedict:
+            PnD = rangedict["PnD"].rangefunc(PnD, rangedict["PnD"])
+
+        if "TnH" in rangedict:
+            TnH = rangedict["TnH"].rangefunc(TnH, rangedict["TnH"])
+
+        if "TnM" in rangedict:
+            TnM = rangedict["TnM"].rangefunc(TnM, rangedict["TnM"])
+
+        if "TnS" in rangedict:
+            TnS = rangedict["TnS"].rangefunc(TnS, rangedict["TnS"])
+
+        return (PnY, PnM, PnW, PnD, TnH, TnM, TnS)
+
+    @classmethod
+    def range_check_repeating_interval(
+        cls, R=None, Rnn=None, interval=None, rangedict=None
+    ):
+        """Range check the repetition count Rnn; R and interval pass through."""
+        if rangedict is None:
+            rangedict = cls.REPEATING_INTERVAL_RANGE_DICT
+
+        if "Rnn" in rangedict:
+            Rnn = rangedict["Rnn"].rangefunc(Rnn, rangedict["Rnn"])
+
+        return (R, Rnn, interval)
+
+    @classmethod
+    def range_check_timezone(
+        cls, negative=None, Z=None, hh=None, mm=None, name="", rangedict=None
+    ):
+        """Range check timezone offset hours/minutes; other fields pass through."""
+        if rangedict is None:
+            rangedict = cls.TIMEZONE_RANGE_DICT
+
+        if "hh" in rangedict:
+            hh = rangedict["hh"].rangefunc(hh, rangedict["hh"])
+
+        if "mm" in rangedict:
+            mm = rangedict["mm"].rangefunc(mm, rangedict["mm"])
+
+        return (negative, Z, hh, mm, name)
+
+    @classmethod
+    def _build_object(cls, parsetuple):
+        # Given a TupleBuilder tuple, build the correct object
+        # Dispatches on the tuple type; anything not matched below is
+        # treated as a TimezoneTuple (the fall-through case).
+        if type(parsetuple) is DateTuple:
+            return cls.build_date(
+                YYYY=parsetuple.YYYY,
+                MM=parsetuple.MM,
+                DD=parsetuple.DD,
+                Www=parsetuple.Www,
+                D=parsetuple.D,
+                DDD=parsetuple.DDD,
+            )
+
+        if type(parsetuple) is TimeTuple:
+            return cls.build_time(
+                hh=parsetuple.hh, mm=parsetuple.mm, ss=parsetuple.ss, tz=parsetuple.tz
+            )
+
+        if type(parsetuple) is DatetimeTuple:
+            return cls.build_datetime(parsetuple.date, parsetuple.time)
+
+        if type(parsetuple) is DurationTuple:
+            return cls.build_duration(
+                PnY=parsetuple.PnY,
+                PnM=parsetuple.PnM,
+                PnW=parsetuple.PnW,
+                PnD=parsetuple.PnD,
+                TnH=parsetuple.TnH,
+                TnM=parsetuple.TnM,
+                TnS=parsetuple.TnS,
+            )
+
+        if type(parsetuple) is IntervalTuple:
+            return cls.build_interval(
+                start=parsetuple.start, end=parsetuple.end, duration=parsetuple.duration
+            )
+
+        if type(parsetuple) is RepeatingIntervalTuple:
+            return cls.build_repeating_interval(
+                R=parsetuple.R, Rnn=parsetuple.Rnn, interval=parsetuple.interval
+            )
+
+        return cls.build_timezone(
+            negative=parsetuple.negative,
+            Z=parsetuple.Z,
+            hh=parsetuple.hh,
+            mm=parsetuple.mm,
+            name=parsetuple.name,
+        )
+
+    @classmethod
+    def _is_interval_end_concise(cls, endtuple):
+        # An interval end is "concise" when it omits the year (or is a
+        # bare time), so missing fields must be filled from the start.
+        if type(endtuple) is TimeTuple:
+            return True
+
+        if type(endtuple) is DatetimeTuple:
+            enddatetuple = endtuple.date
+        else:
+            enddatetuple = endtuple
+
+        if enddatetuple.YYYY is None:
+            return True
+
+        return False
+
+    @classmethod
+    def _combine_concise_interval_tuples(cls, starttuple, conciseendtuple):
+        """Expand a concise interval end by borrowing the missing date
+        fields (year, and month when absent) and the timezone from the
+        start tuple; returns a DateTuple or DatetimeTuple for the end."""
+        starttimetuple = None
+        startdatetuple = None
+
+        endtimetuple = None
+        enddatetuple = None
+
+        if type(starttuple) is DateTuple:
+            startdatetuple = starttuple
+        else:
+            # Start is a datetime
+            starttimetuple = starttuple.time
+            startdatetuple = starttuple.date
+
+        if type(conciseendtuple) is DateTuple:
+            enddatetuple = conciseendtuple
+        elif type(conciseendtuple) is DatetimeTuple:
+            enddatetuple = conciseendtuple.date
+            endtimetuple = conciseendtuple.time
+        else:
+            # Time
+            endtimetuple = conciseendtuple
+
+        if enddatetuple is not None:
+            if enddatetuple.YYYY is None and enddatetuple.MM is None:
+                newenddatetuple = DateTuple(
+                    YYYY=startdatetuple.YYYY,
+                    MM=startdatetuple.MM,
+                    DD=enddatetuple.DD,
+                    Www=enddatetuple.Www,
+                    D=enddatetuple.D,
+                    DDD=enddatetuple.DDD,
+                )
+            else:
+                newenddatetuple = DateTuple(
+                    YYYY=startdatetuple.YYYY,
+                    MM=enddatetuple.MM,
+                    DD=enddatetuple.DD,
+                    Www=enddatetuple.Www,
+                    D=enddatetuple.D,
+                    DDD=enddatetuple.DDD,
+                )
+
+        # NOTE(review): this also replaces an end tz that differs
+        # explicitly from the start's, not only a missing one — confirm
+        # that is intended for the concise format.
+        if (starttimetuple is not None and starttimetuple.tz is not None) and (
+            endtimetuple is not None and endtimetuple.tz != starttimetuple.tz
+        ):
+            # Copy the timezone across
+            endtimetuple = TimeTuple(
+                hh=endtimetuple.hh,
+                mm=endtimetuple.mm,
+                ss=endtimetuple.ss,
+                tz=starttimetuple.tz,
+            )
+
+        if enddatetuple is not None and endtimetuple is None:
+            return newenddatetuple
+
+        if enddatetuple is not None and endtimetuple is not None:
+            return TupleBuilder.build_datetime(newenddatetuple, endtimetuple)
+
+        return TupleBuilder.build_datetime(startdatetuple, endtimetuple)
+
+
+class TupleBuilder(BaseTimeBuilder):
+    """Builder whose build_* methods simply wrap their arguments in the
+    corresponding namedtuple, with no range checking or casting."""
+
+    # Builder used to return the arguments as a tuple, cleans up some parse methods
+    @classmethod
+    def build_date(cls, YYYY=None, MM=None, DD=None, Www=None, D=None, DDD=None):
+
+        return DateTuple(YYYY, MM, DD, Www, D, DDD)
+
+    @classmethod
+    def build_time(cls, hh=None, mm=None, ss=None, tz=None):
+        return TimeTuple(hh, mm, ss, tz)
+
+    @classmethod
+    def build_datetime(cls, date, time):
+        return DatetimeTuple(date, time)
+
+    @classmethod
+    def build_duration(
+        cls, PnY=None, PnM=None, PnW=None, PnD=None, TnH=None, TnM=None, TnS=None
+    ):
+
+        return DurationTuple(PnY, PnM, PnW, PnD, TnH, TnM, TnS)
+
+    @classmethod
+    def build_interval(cls, start=None, end=None, duration=None):
+        return IntervalTuple(start, end, duration)
+
+    @classmethod
+    def build_repeating_interval(cls, R=None, Rnn=None, interval=None):
+        return RepeatingIntervalTuple(R, Rnn, interval)
+
+    @classmethod
+    def build_timezone(cls, negative=None, Z=None, hh=None, mm=None, name=""):
+        return TimezoneTuple(negative, Z, hh, mm, name)
diff --git a/libs/aniso8601/builders/python.py b/libs/aniso8601/builders/python.py
new file mode 100644
index 000000000..8956740e7
--- /dev/null
+++ b/libs/aniso8601/builders/python.py
@@ -0,0 +1,705 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+import datetime
+from collections import namedtuple
+from functools import partial
+
+from aniso8601.builders import (
+ BaseTimeBuilder,
+ DatetimeTuple,
+ DateTuple,
+ Limit,
+ TimeTuple,
+ TupleBuilder,
+ cast,
+ range_check,
+)
+from aniso8601.exceptions import (
+ DayOutOfBoundsError,
+ HoursOutOfBoundsError,
+ ISOFormatError,
+ LeapSecondError,
+ MidnightBoundsError,
+ MinutesOutOfBoundsError,
+ MonthOutOfBoundsError,
+ SecondsOutOfBoundsError,
+ WeekOutOfBoundsError,
+ YearOutOfBoundsError,
+)
+from aniso8601.utcoffset import UTCOffset
+
+# Nominal calendar conversion factors used when turning fractional duration
+# components into microseconds (a year is treated as 365 days and a month as
+# 30 days for this purpose).
+DAYS_PER_YEAR = 365
+DAYS_PER_MONTH = 30
+DAYS_PER_WEEK = 7
+
+HOURS_PER_DAY = 24
+
+MINUTES_PER_HOUR = 60
+MINUTES_PER_DAY = MINUTES_PER_HOUR * HOURS_PER_DAY
+
+SECONDS_PER_MINUTE = 60
+SECONDS_PER_DAY = MINUTES_PER_DAY * SECONDS_PER_MINUTE
+
+MICROSECONDS_PER_SECOND = int(1e6)
+
+MICROSECONDS_PER_MINUTE = 60 * MICROSECONDS_PER_SECOND
+MICROSECONDS_PER_HOUR = 60 * MICROSECONDS_PER_MINUTE
+MICROSECONDS_PER_DAY = 24 * MICROSECONDS_PER_HOUR
+MICROSECONDS_PER_WEEK = 7 * MICROSECONDS_PER_DAY
+MICROSECONDS_PER_MONTH = DAYS_PER_MONTH * MICROSECONDS_PER_DAY
+MICROSECONDS_PER_YEAR = DAYS_PER_YEAR * MICROSECONDS_PER_DAY
+
+# Upper bound on representable durations (days field of datetime.timedelta.max)
+TIMEDELTA_MAX_DAYS = datetime.timedelta.max.days
+
+# A fractional value split into its integer part and the fractional
+# remainder expressed as whole microseconds.
+FractionalComponent = namedtuple(
+    "FractionalComponent", ["principal", "microsecondremainder"]
+)
+
+
+def year_range_check(valuestr, limit):
+    """Range check a year string, first right-padding truncated years
+    (fewer than 4 digits) with zeros, e.g. '19' -> '1900'."""
+    YYYYstr = valuestr
+
+    # Truncated dates, like '19', refer to 1900-1999 inclusive,
+    # we simply parse to 1900
+    if len(valuestr) < 4:
+        # Shift 0s in from the left to form complete year
+        YYYYstr = valuestr.ljust(4, "0")
+
+    return range_check(YYYYstr, limit)
+
+
+def fractional_range_check(conversion, valuestr, limit):
+    """Range check *valuestr*, splitting any fractional part into a
+    FractionalComponent whose remainder is expressed in microseconds
+    (scaled by *conversion*).
+
+    Returns None, an int, or a FractionalComponent.  The bounds check is
+    performed on the full numeric value of the string, not just the
+    integer part.
+    """
+    if valuestr is None:
+        return None
+
+    if "." in valuestr:
+        castfunc = partial(_cast_to_fractional_component, conversion)
+    else:
+        castfunc = int
+
+    value = cast(valuestr, castfunc, thrownmessage=limit.casterrorstring)
+
+    if type(value) is FractionalComponent:
+        tocheck = float(valuestr)
+    else:
+        tocheck = int(valuestr)
+
+    if limit.min is not None and tocheck < limit.min:
+        raise limit.rangeexception(limit.rangeerrorstring)
+
+    if limit.max is not None and tocheck > limit.max:
+        raise limit.rangeexception(limit.rangeerrorstring)
+
+    return value
+
+
+def _cast_to_fractional_component(conversion, floatstr):
+    # Splits a string with a decimal point into an int, and
+    # int representing the floating point remainder as a number
+    # of microseconds, determined by multiplying by conversion
+    intpart, floatpart = floatstr.split(".")
+
+    intvalue = int(intpart)
+    preconvertedvalue = int(floatpart)
+
+    # Integer arithmetic avoids float rounding; // truncates any
+    # sub-microsecond precision
+    convertedvalue = (preconvertedvalue * conversion) // (10 ** len(floatpart))
+
+    return FractionalComponent(intvalue, convertedvalue)
+
+
+class PythonTimeBuilder(BaseTimeBuilder):
+ # 0000 (1 BC) is not representable as a Python date
+ DATE_YYYY_LIMIT = Limit(
+ "Invalid year string.",
+ datetime.MINYEAR,
+ datetime.MAXYEAR,
+ YearOutOfBoundsError,
+ "Year must be between {0}..{1}.".format(datetime.MINYEAR, datetime.MAXYEAR),
+ year_range_check,
+ )
+ TIME_HH_LIMIT = Limit(
+ "Invalid hour string.",
+ 0,
+ 24,
+ HoursOutOfBoundsError,
+ "Hour must be between 0..24 with " "24 representing midnight.",
+ partial(fractional_range_check, MICROSECONDS_PER_HOUR),
+ )
+ TIME_MM_LIMIT = Limit(
+ "Invalid minute string.",
+ 0,
+ 59,
+ MinutesOutOfBoundsError,
+ "Minute must be between 0..59.",
+ partial(fractional_range_check, MICROSECONDS_PER_MINUTE),
+ )
+ TIME_SS_LIMIT = Limit(
+ "Invalid second string.",
+ 0,
+ 60,
+ SecondsOutOfBoundsError,
+ "Second must be between 0..60 with " "60 representing a leap second.",
+ partial(fractional_range_check, MICROSECONDS_PER_SECOND),
+ )
+ DURATION_PNY_LIMIT = Limit(
+ "Invalid year duration string.",
+ None,
+ None,
+ YearOutOfBoundsError,
+ None,
+ partial(fractional_range_check, MICROSECONDS_PER_YEAR),
+ )
+ DURATION_PNM_LIMIT = Limit(
+ "Invalid month duration string.",
+ None,
+ None,
+ MonthOutOfBoundsError,
+ None,
+ partial(fractional_range_check, MICROSECONDS_PER_MONTH),
+ )
+ DURATION_PNW_LIMIT = Limit(
+ "Invalid week duration string.",
+ None,
+ None,
+ WeekOutOfBoundsError,
+ None,
+ partial(fractional_range_check, MICROSECONDS_PER_WEEK),
+ )
+ DURATION_PND_LIMIT = Limit(
+ "Invalid day duration string.",
+ None,
+ None,
+ DayOutOfBoundsError,
+ None,
+ partial(fractional_range_check, MICROSECONDS_PER_DAY),
+ )
+ DURATION_TNH_LIMIT = Limit(
+ "Invalid hour duration string.",
+ None,
+ None,
+ HoursOutOfBoundsError,
+ None,
+ partial(fractional_range_check, MICROSECONDS_PER_HOUR),
+ )
+ DURATION_TNM_LIMIT = Limit(
+ "Invalid minute duration string.",
+ None,
+ None,
+ MinutesOutOfBoundsError,
+ None,
+ partial(fractional_range_check, MICROSECONDS_PER_MINUTE),
+ )
+ DURATION_TNS_LIMIT = Limit(
+ "Invalid second duration string.",
+ None,
+ None,
+ SecondsOutOfBoundsError,
+ None,
+ partial(fractional_range_check, MICROSECONDS_PER_SECOND),
+ )
+
+ DATE_RANGE_DICT = BaseTimeBuilder.DATE_RANGE_DICT
+ DATE_RANGE_DICT["YYYY"] = DATE_YYYY_LIMIT
+
+ TIME_RANGE_DICT = {"hh": TIME_HH_LIMIT, "mm": TIME_MM_LIMIT, "ss": TIME_SS_LIMIT}
+
+ DURATION_RANGE_DICT = {
+ "PnY": DURATION_PNY_LIMIT,
+ "PnM": DURATION_PNM_LIMIT,
+ "PnW": DURATION_PNW_LIMIT,
+ "PnD": DURATION_PND_LIMIT,
+ "TnH": DURATION_TNH_LIMIT,
+ "TnM": DURATION_TNM_LIMIT,
+ "TnS": DURATION_TNS_LIMIT,
+ }
+
+ @classmethod
+ def build_date(cls, YYYY=None, MM=None, DD=None, Www=None, D=None, DDD=None):
+ YYYY, MM, DD, Www, D, DDD = cls.range_check_date(YYYY, MM, DD, Www, D, DDD)
+
+ if MM is None:
+ MM = 1
+
+ if DD is None:
+ DD = 1
+
+ if DDD is not None:
+ return PythonTimeBuilder._build_ordinal_date(YYYY, DDD)
+
+ if Www is not None:
+ return PythonTimeBuilder._build_week_date(YYYY, Www, isoday=D)
+
+ return datetime.date(YYYY, MM, DD)
+
+ @classmethod
+ def build_time(cls, hh=None, mm=None, ss=None, tz=None):
+ # Builds a time from the given parts, handling fractional arguments
+ # where necessary
+ hours = 0
+ minutes = 0
+ seconds = 0
+ microseconds = 0
+
+ hh, mm, ss, tz = cls.range_check_time(hh, mm, ss, tz)
+
+ if type(hh) is FractionalComponent:
+ hours = hh.principal
+ microseconds = hh.microsecondremainder
+ elif hh is not None:
+ hours = hh
+
+ if type(mm) is FractionalComponent:
+ minutes = mm.principal
+ microseconds = mm.microsecondremainder
+ elif mm is not None:
+ minutes = mm
+
+ if type(ss) is FractionalComponent:
+ seconds = ss.principal
+ microseconds = ss.microsecondremainder
+ elif ss is not None:
+ seconds = ss
+
+ (
+ hours,
+ minutes,
+ seconds,
+ microseconds,
+ ) = PythonTimeBuilder._distribute_microseconds(
+ microseconds,
+ (hours, minutes, seconds),
+ (MICROSECONDS_PER_HOUR, MICROSECONDS_PER_MINUTE, MICROSECONDS_PER_SECOND),
+ )
+
+ # Move midnight into range
+ if hours == 24:
+ hours = 0
+
+ # Datetimes don't handle fractional components, so we use a timedelta
+ if tz is not None:
+ return (
+ datetime.datetime(
+ 1, 1, 1, hour=hours, minute=minutes, tzinfo=cls._build_object(tz)
+ )
+ + datetime.timedelta(seconds=seconds, microseconds=microseconds)
+ ).timetz()
+
+ return (
+ datetime.datetime(1, 1, 1, hour=hours, minute=minutes)
+ + datetime.timedelta(seconds=seconds, microseconds=microseconds)
+ ).time()
+
+ @classmethod
+ def build_datetime(cls, date, time):
+ return datetime.datetime.combine(
+ cls._build_object(date), cls._build_object(time)
+ )
+
+ @classmethod
+ def build_duration(
+ cls, PnY=None, PnM=None, PnW=None, PnD=None, TnH=None, TnM=None, TnS=None
+ ):
+ # PnY and PnM will be distributed to PnD, microsecond remainder to TnS
+ PnY, PnM, PnW, PnD, TnH, TnM, TnS = cls.range_check_duration(
+ PnY, PnM, PnW, PnD, TnH, TnM, TnS
+ )
+
+ seconds = TnS.principal
+ microseconds = TnS.microsecondremainder
+
+ return datetime.timedelta(
+ days=PnD,
+ seconds=seconds,
+ microseconds=microseconds,
+ minutes=TnM,
+ hours=TnH,
+ weeks=PnW,
+ )
+
+ @classmethod
+ def build_interval(cls, start=None, end=None, duration=None):
+ start, end, duration = cls.range_check_interval(start, end, duration)
+
+ if start is not None and end is not None:
+ # <start>/<end>
+ startobject = cls._build_object(start)
+ endobject = cls._build_object(end)
+
+ return (startobject, endobject)
+
+ durationobject = cls._build_object(duration)
+
+ # Determine if datetime promotion is required
+ datetimerequired = (
+ duration.TnH is not None
+ or duration.TnM is not None
+ or duration.TnS is not None
+ or durationobject.seconds != 0
+ or durationobject.microseconds != 0
+ )
+
+ if end is not None:
+ # <duration>/<end>
+ endobject = cls._build_object(end)
+
+ # Range check
+ if type(end) is DateTuple and datetimerequired is True:
+ # <end> is a date, and <duration> requires datetime resolution
+ return (
+ endobject,
+ cls.build_datetime(end, TupleBuilder.build_time()) - durationobject,
+ )
+
+ return (endobject, endobject - durationobject)
+
+ # <start>/<duration>
+ startobject = cls._build_object(start)
+
+ # Range check
+ if type(start) is DateTuple and datetimerequired is True:
+ # <start> is a date, and <duration> requires datetime resolution
+ return (
+ startobject,
+ cls.build_datetime(start, TupleBuilder.build_time()) + durationobject,
+ )
+
+ return (startobject, startobject + durationobject)
+
+ @classmethod
+ def build_repeating_interval(cls, R=None, Rnn=None, interval=None):
+ startobject = None
+ endobject = None
+
+ R, Rnn, interval = cls.range_check_repeating_interval(R, Rnn, interval)
+
+ if interval.start is not None:
+ startobject = cls._build_object(interval.start)
+
+ if interval.end is not None:
+ endobject = cls._build_object(interval.end)
+
+ if interval.duration is not None:
+ durationobject = cls._build_object(interval.duration)
+ else:
+ durationobject = endobject - startobject
+
+ if R is True:
+ if startobject is not None:
+ return cls._date_generator_unbounded(startobject, durationobject)
+
+ return cls._date_generator_unbounded(endobject, -durationobject)
+
+ iterations = int(Rnn)
+
+ if startobject is not None:
+ return cls._date_generator(startobject, durationobject, iterations)
+
+ return cls._date_generator(endobject, -durationobject, iterations)
+
+ @classmethod
+ def build_timezone(cls, negative=None, Z=None, hh=None, mm=None, name=""):
+ negative, Z, hh, mm, name = cls.range_check_timezone(negative, Z, hh, mm, name)
+
+ if Z is True:
+ # Z -> UTC
+ return UTCOffset(name="UTC", minutes=0)
+
+ tzhour = int(hh)
+
+ if mm is not None:
+ tzminute = int(mm)
+ else:
+ tzminute = 0
+
+ if negative is True:
+ return UTCOffset(name=name, minutes=-(tzhour * 60 + tzminute))
+
+ return UTCOffset(name=name, minutes=tzhour * 60 + tzminute)
+
+ @classmethod
+ def range_check_duration(
+ cls,
+ PnY=None,
+ PnM=None,
+ PnW=None,
+ PnD=None,
+ TnH=None,
+ TnM=None,
+ TnS=None,
+ rangedict=None,
+ ):
+ years = 0
+ months = 0
+ days = 0
+ weeks = 0
+ hours = 0
+ minutes = 0
+ seconds = 0
+ microseconds = 0
+
+ PnY, PnM, PnW, PnD, TnH, TnM, TnS = BaseTimeBuilder.range_check_duration(
+ PnY, PnM, PnW, PnD, TnH, TnM, TnS, rangedict=cls.DURATION_RANGE_DICT
+ )
+
+ if PnY is not None:
+ if type(PnY) is FractionalComponent:
+ years = PnY.principal
+ microseconds = PnY.microsecondremainder
+ else:
+ years = PnY
+
+ if years * DAYS_PER_YEAR > TIMEDELTA_MAX_DAYS:
+ raise YearOutOfBoundsError("Duration exceeds maximum timedelta size.")
+
+ if PnM is not None:
+ if type(PnM) is FractionalComponent:
+ months = PnM.principal
+ microseconds = PnM.microsecondremainder
+ else:
+ months = PnM
+
+ if months * DAYS_PER_MONTH > TIMEDELTA_MAX_DAYS:
+ raise MonthOutOfBoundsError("Duration exceeds maximum timedelta size.")
+
+ if PnW is not None:
+ if type(PnW) is FractionalComponent:
+ weeks = PnW.principal
+ microseconds = PnW.microsecondremainder
+ else:
+ weeks = PnW
+
+ if weeks * DAYS_PER_WEEK > TIMEDELTA_MAX_DAYS:
+ raise WeekOutOfBoundsError("Duration exceeds maximum timedelta size.")
+
+ if PnD is not None:
+ if type(PnD) is FractionalComponent:
+ days = PnD.principal
+ microseconds = PnD.microsecondremainder
+ else:
+ days = PnD
+
+ if days > TIMEDELTA_MAX_DAYS:
+ raise DayOutOfBoundsError("Duration exceeds maximum timedelta size.")
+
+ if TnH is not None:
+ if type(TnH) is FractionalComponent:
+ hours = TnH.principal
+ microseconds = TnH.microsecondremainder
+ else:
+ hours = TnH
+
+ if hours // HOURS_PER_DAY > TIMEDELTA_MAX_DAYS:
+ raise HoursOutOfBoundsError("Duration exceeds maximum timedelta size.")
+
+ if TnM is not None:
+ if type(TnM) is FractionalComponent:
+ minutes = TnM.principal
+ microseconds = TnM.microsecondremainder
+ else:
+ minutes = TnM
+
+ if minutes // MINUTES_PER_DAY > TIMEDELTA_MAX_DAYS:
+ raise MinutesOutOfBoundsError(
+ "Duration exceeds maximum timedelta size."
+ )
+
+ if TnS is not None:
+ if type(TnS) is FractionalComponent:
+ seconds = TnS.principal
+ microseconds = TnS.microsecondremainder
+ else:
+ seconds = TnS
+
+ if seconds // SECONDS_PER_DAY > TIMEDELTA_MAX_DAYS:
+ raise SecondsOutOfBoundsError(
+ "Duration exceeds maximum timedelta size."
+ )
+
+ (
+ years,
+ months,
+ weeks,
+ days,
+ hours,
+ minutes,
+ seconds,
+ microseconds,
+ ) = PythonTimeBuilder._distribute_microseconds(
+ microseconds,
+ (years, months, weeks, days, hours, minutes, seconds),
+ (
+ MICROSECONDS_PER_YEAR,
+ MICROSECONDS_PER_MONTH,
+ MICROSECONDS_PER_WEEK,
+ MICROSECONDS_PER_DAY,
+ MICROSECONDS_PER_HOUR,
+ MICROSECONDS_PER_MINUTE,
+ MICROSECONDS_PER_SECOND,
+ ),
+ )
+
+ # Note that weeks can be handled without conversion to days
+ totaldays = years * DAYS_PER_YEAR + months * DAYS_PER_MONTH + days
+
+ # Check against timedelta limits
+ if (
+ totaldays
+ + weeks * DAYS_PER_WEEK
+ + hours // HOURS_PER_DAY
+ + minutes // MINUTES_PER_DAY
+ + seconds // SECONDS_PER_DAY
+ > TIMEDELTA_MAX_DAYS
+ ):
+ raise DayOutOfBoundsError("Duration exceeds maximum timedelta size.")
+
+ return (
+ None,
+ None,
+ weeks,
+ totaldays,
+ hours,
+ minutes,
+ FractionalComponent(seconds, microseconds),
+ )
+
+ @classmethod
+ def range_check_interval(cls, start=None, end=None, duration=None):
+ # Handles concise format, range checks any potential durations
+ if start is not None and end is not None:
+ # <start>/<end>
+ # Handle concise format
+ if cls._is_interval_end_concise(end) is True:
+ end = cls._combine_concise_interval_tuples(start, end)
+
+ return (start, end, duration)
+
+ durationobject = cls._build_object(duration)
+
+ if end is not None:
+ # <duration>/<end>
+ endobject = cls._build_object(end)
+
+ # Range check
+ if type(end) is DateTuple:
+ enddatetime = cls.build_datetime(end, TupleBuilder.build_time())
+
+ if enddatetime - datetime.datetime.min < durationobject:
+ raise YearOutOfBoundsError("Interval end less than minimium date.")
+ else:
+ mindatetime = datetime.datetime.min
+
+ if end.time.tz is not None:
+ mindatetime = mindatetime.replace(tzinfo=endobject.tzinfo)
+
+ if endobject - mindatetime < durationobject:
+ raise YearOutOfBoundsError("Interval end less than minimium date.")
+ else:
+ # <start>/<duration>
+ startobject = cls._build_object(start)
+
+ # Range check
+ if type(start) is DateTuple:
+ startdatetime = cls.build_datetime(start, TupleBuilder.build_time())
+
+ if datetime.datetime.max - startdatetime < durationobject:
+ raise YearOutOfBoundsError(
+ "Interval end greater than maximum date."
+ )
+ else:
+ maxdatetime = datetime.datetime.max
+
+ if start.time.tz is not None:
+ maxdatetime = maxdatetime.replace(tzinfo=startobject.tzinfo)
+
+ if maxdatetime - startobject < durationobject:
+ raise YearOutOfBoundsError(
+ "Interval end greater than maximum date."
+ )
+
+ return (start, end, duration)
+
+ @staticmethod
+ def _build_week_date(isoyear, isoweek, isoday=None):
+ if isoday is None:
+ return PythonTimeBuilder._iso_year_start(isoyear) + datetime.timedelta(
+ weeks=isoweek - 1
+ )
+
+ return PythonTimeBuilder._iso_year_start(isoyear) + datetime.timedelta(
+ weeks=isoweek - 1, days=isoday - 1
+ )
+
+ @staticmethod
+ def _build_ordinal_date(isoyear, isoday):
+ # Day of year to a date
+ # https://stackoverflow.com/questions/2427555/python-question-year-and-day-of-year-to-date
+ builtdate = datetime.date(isoyear, 1, 1) + datetime.timedelta(days=isoday - 1)
+
+ return builtdate
+
+ @staticmethod
+ def _iso_year_start(isoyear):
+ # Given an ISO year, returns the equivalent of the start of the year
+ # on the Gregorian calendar (which is used by Python)
+ # Stolen from:
+ # http://stackoverflow.com/questions/304256/whats-the-best-way-to-find-the-inverse-of-datetime-isocalendar
+
+ # Determine the location of the 4th of January, the first week of
+ # the ISO year is the week containing the 4th of January
+ # http://en.wikipedia.org/wiki/ISO_week_date
+ fourth_jan = datetime.date(isoyear, 1, 4)
+
+ # Note the conversion from ISO day (1 - 7) and Python day (0 - 6)
+ delta = datetime.timedelta(days=fourth_jan.isoweekday() - 1)
+
+ # Return the start of the year
+ return fourth_jan - delta
+
+ @staticmethod
+ def _date_generator(startdate, timedelta, iterations):
+ currentdate = startdate
+ currentiteration = 0
+
+ while currentiteration < iterations:
+ yield currentdate
+
+ # Update the values
+ currentdate += timedelta
+ currentiteration += 1
+
+ @staticmethod
+ def _date_generator_unbounded(startdate, timedelta):
+ currentdate = startdate
+
+ while True:
+ yield currentdate
+
+ # Update the value
+ currentdate += timedelta
+
+ @staticmethod
+ def _distribute_microseconds(todistribute, recipients, reductions):
+ # Given a number of microseconds as int, a tuple of ints length n
+ # to distribute to, and a tuple of ints length n to divide todistribute
+ # by (from largest to smallest), returns a tuple of length n + 1, with
+ # todistribute divided across recipients using the reductions, with
+ # the final remainder returned as the final tuple member
+ results = []
+
+ remainder = todistribute
+
+ for index, reduction in enumerate(reductions):
+ additional, remainder = divmod(remainder, reduction)
+
+ results.append(recipients[index] + additional)
+
+ # Always return the remaining microseconds
+ results.append(remainder)
+
+ return tuple(results)
diff --git a/libs/aniso8601/builders/tests/__init__.py b/libs/aniso8601/builders/tests/__init__.py
new file mode 100644
index 000000000..1a94e017a
--- /dev/null
+++ b/libs/aniso8601/builders/tests/__init__.py
@@ -0,0 +1,7 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
diff --git a/libs/aniso8601/builders/tests/test_init.py b/libs/aniso8601/builders/tests/test_init.py
new file mode 100644
index 000000000..7c9f092c7
--- /dev/null
+++ b/libs/aniso8601/builders/tests/test_init.py
@@ -0,0 +1,838 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+import unittest
+
+import aniso8601
+from aniso8601.builders import (
+ BaseTimeBuilder,
+ DatetimeTuple,
+ DateTuple,
+ DurationTuple,
+ IntervalTuple,
+ RepeatingIntervalTuple,
+ TimeTuple,
+ TimezoneTuple,
+ TupleBuilder,
+ cast,
+)
+from aniso8601.exceptions import (
+ DayOutOfBoundsError,
+ HoursOutOfBoundsError,
+ ISOFormatError,
+ LeapSecondError,
+ MidnightBoundsError,
+ MinutesOutOfBoundsError,
+ MonthOutOfBoundsError,
+ SecondsOutOfBoundsError,
+ WeekOutOfBoundsError,
+)
+from aniso8601.tests.compat import mock
+
+
+class LeapSecondSupportingTestBuilder(BaseTimeBuilder):
+ LEAP_SECONDS_SUPPORTED = True
+
+
+class TestBuilderFunctions(unittest.TestCase):
+ def test_cast(self):
+ self.assertEqual(cast("1", int), 1)
+ self.assertEqual(cast("-2", int), -2)
+ self.assertEqual(cast("3", float), float(3))
+ self.assertEqual(cast("-4", float), float(-4))
+ self.assertEqual(cast("5.6", float), 5.6)
+ self.assertEqual(cast("-7.8", float), -7.8)
+
+ def test_cast_exception(self):
+ with self.assertRaises(ISOFormatError):
+ cast("asdf", int)
+
+ with self.assertRaises(ISOFormatError):
+ cast("asdf", float)
+
+ def test_cast_caughtexception(self):
+ def tester(value):
+ raise RuntimeError
+
+ with self.assertRaises(ISOFormatError):
+ cast("asdf", tester, caughtexceptions=(RuntimeError,))
+
+ def test_cast_thrownexception(self):
+ with self.assertRaises(RuntimeError):
+ cast("asdf", int, thrownexception=RuntimeError)
+
+
+class TestBaseTimeBuilder(unittest.TestCase):
+ def test_build_date(self):
+ with self.assertRaises(NotImplementedError):
+ BaseTimeBuilder.build_date()
+
+ def test_build_time(self):
+ with self.assertRaises(NotImplementedError):
+ BaseTimeBuilder.build_time()
+
+ def test_build_datetime(self):
+ with self.assertRaises(NotImplementedError):
+ BaseTimeBuilder.build_datetime(None, None)
+
+ def test_build_duration(self):
+ with self.assertRaises(NotImplementedError):
+ BaseTimeBuilder.build_duration()
+
+ def test_build_interval(self):
+ with self.assertRaises(NotImplementedError):
+ BaseTimeBuilder.build_interval()
+
+ def test_build_repeating_interval(self):
+ with self.assertRaises(NotImplementedError):
+ BaseTimeBuilder.build_repeating_interval()
+
+ def test_build_timezone(self):
+ with self.assertRaises(NotImplementedError):
+ BaseTimeBuilder.build_timezone()
+
+ def test_range_check_date(self):
+ # Check the calendar for day ranges
+ with self.assertRaises(DayOutOfBoundsError):
+ BaseTimeBuilder.range_check_date(YYYY="0007", MM="02", DD="30")
+
+ with self.assertRaises(DayOutOfBoundsError):
+ BaseTimeBuilder.range_check_date(YYYY="0007", DDD="366")
+
+ with self.assertRaises(MonthOutOfBoundsError):
+ BaseTimeBuilder.range_check_date(YYYY="4333", MM="30", DD="30")
+
+ # 0 isn't a valid week number
+ with self.assertRaises(WeekOutOfBoundsError):
+ BaseTimeBuilder.range_check_date(YYYY="2003", Www="00")
+
+ # Week must not be larger than 53
+ with self.assertRaises(WeekOutOfBoundsError):
+ BaseTimeBuilder.range_check_date(YYYY="2004", Www="54")
+
+ # 0 isn't a valid day number
+ with self.assertRaises(DayOutOfBoundsError):
+ BaseTimeBuilder.range_check_date(YYYY="2001", Www="02", D="0")
+
+ # Day must not be larger than 7
+ with self.assertRaises(DayOutOfBoundsError):
+ BaseTimeBuilder.range_check_date(YYYY="2001", Www="02", D="8")
+
+ with self.assertRaises(DayOutOfBoundsError):
+ BaseTimeBuilder.range_check_date(YYYY="1981", DDD="000")
+
+ # Day must be 365, or 366, not larger
+ with self.assertRaises(DayOutOfBoundsError):
+ BaseTimeBuilder.range_check_date(YYYY="1234", DDD="000")
+
+ with self.assertRaises(DayOutOfBoundsError):
+ BaseTimeBuilder.range_check_date(YYYY="1234", DDD="367")
+
+ # https://bitbucket.org/nielsenb/aniso8601/issues/14/parsing-ordinal-dates-should-only-allow
+ with self.assertRaises(DayOutOfBoundsError):
+ BaseTimeBuilder.range_check_date(YYYY="1981", DDD="366")
+
+ # Make sure Nones pass through unmodified
+ self.assertEqual(
+ BaseTimeBuilder.range_check_date(rangedict={}),
+ (None, None, None, None, None, None),
+ )
+
+ def test_range_check_time(self):
+ # Leap seconds not supported
+ # https://bitbucket.org/nielsenb/aniso8601/issues/10/sub-microsecond-precision-in-durations-is
+ # https://bitbucket.org/nielsenb/aniso8601/issues/13/parsing-of-leap-second-gives-wildly
+ with self.assertRaises(LeapSecondError):
+ BaseTimeBuilder.range_check_time(hh="23", mm="59", ss="60")
+
+ with self.assertRaises(SecondsOutOfBoundsError):
+ BaseTimeBuilder.range_check_time(hh="00", mm="00", ss="60")
+
+ with self.assertRaises(SecondsOutOfBoundsError):
+ BaseTimeBuilder.range_check_time(hh="00", mm="00", ss="61")
+
+ with self.assertRaises(MinutesOutOfBoundsError):
+ BaseTimeBuilder.range_check_time(hh="00", mm="61")
+
+ with self.assertRaises(MinutesOutOfBoundsError):
+ BaseTimeBuilder.range_check_time(hh="00", mm="60")
+
+ with self.assertRaises(MinutesOutOfBoundsError):
+ BaseTimeBuilder.range_check_time(hh="00", mm="60.1")
+
+ with self.assertRaises(HoursOutOfBoundsError):
+ BaseTimeBuilder.range_check_time(hh="25")
+
+ # Hour 24 can only represent midnight
+ with self.assertRaises(MidnightBoundsError):
+ BaseTimeBuilder.range_check_time(hh="24", mm="00", ss="01")
+
+ with self.assertRaises(MidnightBoundsError):
+ BaseTimeBuilder.range_check_time(hh="24", mm="00.1")
+
+ with self.assertRaises(MidnightBoundsError):
+ BaseTimeBuilder.range_check_time(hh="24", mm="01")
+
+ with self.assertRaises(MidnightBoundsError):
+ BaseTimeBuilder.range_check_time(hh="24.1")
+
+ # Leap seconds not supported
+ # https://bitbucket.org/nielsenb/aniso8601/issues/10/sub-microsecond-precision-in-durations-is
+ # https://bitbucket.org/nielsenb/aniso8601/issues/13/parsing-of-leap-second-gives-wildly
+ with self.assertRaises(LeapSecondError):
+ BaseTimeBuilder.range_check_time(hh="23", mm="59", ss="60")
+
+ # Make sure Nones pass through unmodified
+ self.assertEqual(
+ BaseTimeBuilder.range_check_time(rangedict={}), (None, None, None, None)
+ )
+
+ def test_range_check_time_leap_seconds_supported(self):
+ self.assertEqual(
+ LeapSecondSupportingTestBuilder.range_check_time(hh="23", mm="59", ss="60"),
+ (23, 59, 60, None),
+ )
+
+ with self.assertRaises(SecondsOutOfBoundsError):
+ LeapSecondSupportingTestBuilder.range_check_time(hh="01", mm="02", ss="60")
+
+ def test_range_check_duration(self):
+ self.assertEqual(
+ BaseTimeBuilder.range_check_duration(),
+ (None, None, None, None, None, None, None),
+ )
+
+ self.assertEqual(
+ BaseTimeBuilder.range_check_duration(rangedict={}),
+ (None, None, None, None, None, None, None),
+ )
+
+ def test_range_check_repeating_interval(self):
+ self.assertEqual(
+ BaseTimeBuilder.range_check_repeating_interval(), (None, None, None)
+ )
+
+ self.assertEqual(
+ BaseTimeBuilder.range_check_repeating_interval(rangedict={}),
+ (None, None, None),
+ )
+
+ def test_range_check_timezone(self):
+ self.assertEqual(
+ BaseTimeBuilder.range_check_timezone(), (None, None, None, None, "")
+ )
+
+ self.assertEqual(
+ BaseTimeBuilder.range_check_timezone(rangedict={}),
+ (None, None, None, None, ""),
+ )
+
+ def test_build_object(self):
+ datetest = (
+ DateTuple("1", "2", "3", "4", "5", "6"),
+ {"YYYY": "1", "MM": "2", "DD": "3", "Www": "4", "D": "5", "DDD": "6"},
+ )
+
+ timetest = (
+ TimeTuple("1", "2", "3", TimezoneTuple(False, False, "4", "5", "tz name")),
+ {
+ "hh": "1",
+ "mm": "2",
+ "ss": "3",
+ "tz": TimezoneTuple(False, False, "4", "5", "tz name"),
+ },
+ )
+
+ datetimetest = (
+ DatetimeTuple(
+ DateTuple("1", "2", "3", "4", "5", "6"),
+ TimeTuple(
+ "7", "8", "9", TimezoneTuple(True, False, "10", "11", "tz name")
+ ),
+ ),
+ (
+ DateTuple("1", "2", "3", "4", "5", "6"),
+ TimeTuple(
+ "7", "8", "9", TimezoneTuple(True, False, "10", "11", "tz name")
+ ),
+ ),
+ )
+
+ durationtest = (
+ DurationTuple("1", "2", "3", "4", "5", "6", "7"),
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": "3",
+ "PnD": "4",
+ "TnH": "5",
+ "TnM": "6",
+ "TnS": "7",
+ },
+ )
+
+ intervaltests = (
+ (
+ IntervalTuple(
+ DateTuple("1", "2", "3", "4", "5", "6"),
+ DateTuple("7", "8", "9", "10", "11", "12"),
+ None,
+ ),
+ {
+ "start": DateTuple("1", "2", "3", "4", "5", "6"),
+ "end": DateTuple("7", "8", "9", "10", "11", "12"),
+ "duration": None,
+ },
+ ),
+ (
+ IntervalTuple(
+ DateTuple("1", "2", "3", "4", "5", "6"),
+ None,
+ DurationTuple("7", "8", "9", "10", "11", "12", "13"),
+ ),
+ {
+ "start": DateTuple("1", "2", "3", "4", "5", "6"),
+ "end": None,
+ "duration": DurationTuple("7", "8", "9", "10", "11", "12", "13"),
+ },
+ ),
+ (
+ IntervalTuple(
+ None,
+ TimeTuple(
+ "1", "2", "3", TimezoneTuple(True, False, "4", "5", "tz name")
+ ),
+ DurationTuple("6", "7", "8", "9", "10", "11", "12"),
+ ),
+ {
+ "start": None,
+ "end": TimeTuple(
+ "1", "2", "3", TimezoneTuple(True, False, "4", "5", "tz name")
+ ),
+ "duration": DurationTuple("6", "7", "8", "9", "10", "11", "12"),
+ },
+ ),
+ )
+
+ repeatingintervaltests = (
+ (
+ RepeatingIntervalTuple(
+ True,
+ None,
+ IntervalTuple(
+ DateTuple("1", "2", "3", "4", "5", "6"),
+ DateTuple("7", "8", "9", "10", "11", "12"),
+ None,
+ ),
+ ),
+ {
+ "R": True,
+ "Rnn": None,
+ "interval": IntervalTuple(
+ DateTuple("1", "2", "3", "4", "5", "6"),
+ DateTuple("7", "8", "9", "10", "11", "12"),
+ None,
+ ),
+ },
+ ),
+ (
+ RepeatingIntervalTuple(
+ False,
+ "1",
+ IntervalTuple(
+ DatetimeTuple(
+ DateTuple("2", "3", "4", "5", "6", "7"),
+ TimeTuple("8", "9", "10", None),
+ ),
+ DatetimeTuple(
+ DateTuple("11", "12", "13", "14", "15", "16"),
+ TimeTuple("17", "18", "19", None),
+ ),
+ None,
+ ),
+ ),
+ {
+ "R": False,
+ "Rnn": "1",
+ "interval": IntervalTuple(
+ DatetimeTuple(
+ DateTuple("2", "3", "4", "5", "6", "7"),
+ TimeTuple("8", "9", "10", None),
+ ),
+ DatetimeTuple(
+ DateTuple("11", "12", "13", "14", "15", "16"),
+ TimeTuple("17", "18", "19", None),
+ ),
+ None,
+ ),
+ },
+ ),
+ )
+
+ timezonetest = (
+ TimezoneTuple(False, False, "1", "2", "+01:02"),
+ {"negative": False, "Z": False, "hh": "1", "mm": "2", "name": "+01:02"},
+ )
+
+ with mock.patch.object(
+ aniso8601.builders.BaseTimeBuilder, "build_date"
+ ) as mock_build:
+ mock_build.return_value = datetest[0]
+
+ result = BaseTimeBuilder._build_object(datetest[0])
+
+ self.assertEqual(result, datetest[0])
+ mock_build.assert_called_once_with(**datetest[1])
+
+ with mock.patch.object(
+ aniso8601.builders.BaseTimeBuilder, "build_time"
+ ) as mock_build:
+ mock_build.return_value = timetest[0]
+
+ result = BaseTimeBuilder._build_object(timetest[0])
+
+ self.assertEqual(result, timetest[0])
+ mock_build.assert_called_once_with(**timetest[1])
+
+ with mock.patch.object(
+ aniso8601.builders.BaseTimeBuilder, "build_datetime"
+ ) as mock_build:
+ mock_build.return_value = datetimetest[0]
+
+ result = BaseTimeBuilder._build_object(datetimetest[0])
+
+ self.assertEqual(result, datetimetest[0])
+ mock_build.assert_called_once_with(*datetimetest[1])
+
+ with mock.patch.object(
+ aniso8601.builders.BaseTimeBuilder, "build_duration"
+ ) as mock_build:
+ mock_build.return_value = durationtest[0]
+
+ result = BaseTimeBuilder._build_object(durationtest[0])
+
+ self.assertEqual(result, durationtest[0])
+ mock_build.assert_called_once_with(**durationtest[1])
+
+ for intervaltest in intervaltests:
+ with mock.patch.object(
+ aniso8601.builders.BaseTimeBuilder, "build_interval"
+ ) as mock_build:
+ mock_build.return_value = intervaltest[0]
+
+ result = BaseTimeBuilder._build_object(intervaltest[0])
+
+ self.assertEqual(result, intervaltest[0])
+ mock_build.assert_called_once_with(**intervaltest[1])
+
+ for repeatingintervaltest in repeatingintervaltests:
+ with mock.patch.object(
+ aniso8601.builders.BaseTimeBuilder, "build_repeating_interval"
+ ) as mock_build:
+ mock_build.return_value = repeatingintervaltest[0]
+
+ result = BaseTimeBuilder._build_object(repeatingintervaltest[0])
+
+ self.assertEqual(result, repeatingintervaltest[0])
+ mock_build.assert_called_once_with(**repeatingintervaltest[1])
+
+ with mock.patch.object(
+ aniso8601.builders.BaseTimeBuilder, "build_timezone"
+ ) as mock_build:
+ mock_build.return_value = timezonetest[0]
+
+ result = BaseTimeBuilder._build_object(timezonetest[0])
+
+ self.assertEqual(result, timezonetest[0])
+ mock_build.assert_called_once_with(**timezonetest[1])
+
+ def test_is_interval_end_concise(self):
+ self.assertTrue(
+ BaseTimeBuilder._is_interval_end_concise(TimeTuple("1", "2", "3", None))
+ )
+ self.assertTrue(
+ BaseTimeBuilder._is_interval_end_concise(
+ DateTuple(None, "2", "3", "4", "5", "6")
+ )
+ )
+ self.assertTrue(
+ BaseTimeBuilder._is_interval_end_concise(
+ DatetimeTuple(
+ DateTuple(None, "2", "3", "4", "5", "6"),
+ TimeTuple("7", "8", "9", None),
+ )
+ )
+ )
+
+ self.assertFalse(
+ BaseTimeBuilder._is_interval_end_concise(
+ DateTuple("1", "2", "3", "4", "5", "6")
+ )
+ )
+ self.assertFalse(
+ BaseTimeBuilder._is_interval_end_concise(
+ DatetimeTuple(
+ DateTuple("1", "2", "3", "4", "5", "6"),
+ TimeTuple("7", "8", "9", None),
+ )
+ )
+ )
+
+ def test_combine_concise_interval_tuples(self):
+ testtuples = (
+ (
+ DateTuple("2020", "01", "01", None, None, None),
+ DateTuple(None, None, "02", None, None, None),
+ DateTuple("2020", "01", "02", None, None, None),
+ ),
+ (
+ DateTuple("2008", "02", "15", None, None, None),
+ DateTuple(None, "03", "14", None, None, None),
+ DateTuple("2008", "03", "14", None, None, None),
+ ),
+ (
+ DatetimeTuple(
+ DateTuple("2007", "12", "14", None, None, None),
+ TimeTuple("13", "30", None, None),
+ ),
+ TimeTuple("15", "30", None, None),
+ DatetimeTuple(
+ DateTuple("2007", "12", "14", None, None, None),
+ TimeTuple("15", "30", None, None),
+ ),
+ ),
+ (
+ DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple("09", "00", None, None),
+ ),
+ DatetimeTuple(
+ DateTuple(None, None, "15", None, None, None),
+ TimeTuple("17", "00", None, None),
+ ),
+ DatetimeTuple(
+ DateTuple("2007", "11", "15", None, None, None),
+ TimeTuple("17", "00", None, None),
+ ),
+ ),
+ (
+ DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple("00", "00", None, None),
+ ),
+ DatetimeTuple(
+ DateTuple(None, None, "16", None, None, None),
+ TimeTuple("00", "00", None, None),
+ ),
+ DatetimeTuple(
+ DateTuple("2007", "11", "16", None, None, None),
+ TimeTuple("00", "00", None, None),
+ ),
+ ),
+ (
+ DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple(
+ "09", "00", None, TimezoneTuple(False, True, None, None, "Z")
+ ),
+ ),
+ DatetimeTuple(
+ DateTuple(None, None, "15", None, None, None),
+ TimeTuple("17", "00", None, None),
+ ),
+ DatetimeTuple(
+ DateTuple("2007", "11", "15", None, None, None),
+ TimeTuple(
+ "17", "00", None, TimezoneTuple(False, True, None, None, "Z")
+ ),
+ ),
+ ),
+ )
+
+ for testtuple in testtuples:
+ result = BaseTimeBuilder._combine_concise_interval_tuples(
+ testtuple[0], testtuple[1]
+ )
+ self.assertEqual(result, testtuple[2])
+
+
+class TestTupleBuilder(unittest.TestCase):
+ def test_build_date(self):
+ datetuple = TupleBuilder.build_date()
+
+ self.assertEqual(datetuple, DateTuple(None, None, None, None, None, None))
+
+ datetuple = TupleBuilder.build_date(
+ YYYY="1", MM="2", DD="3", Www="4", D="5", DDD="6"
+ )
+
+ self.assertEqual(datetuple, DateTuple("1", "2", "3", "4", "5", "6"))
+
+ def test_build_time(self):
+ testtuples = (
+ ({}, TimeTuple(None, None, None, None)),
+ (
+ {"hh": "1", "mm": "2", "ss": "3", "tz": None},
+ TimeTuple("1", "2", "3", None),
+ ),
+ (
+ {
+ "hh": "1",
+ "mm": "2",
+ "ss": "3",
+ "tz": TimezoneTuple(False, False, "4", "5", "tz name"),
+ },
+ TimeTuple(
+ "1", "2", "3", TimezoneTuple(False, False, "4", "5", "tz name")
+ ),
+ ),
+ )
+
+ for testtuple in testtuples:
+ self.assertEqual(TupleBuilder.build_time(**testtuple[0]), testtuple[1])
+
+ def test_build_datetime(self):
+ testtuples = (
+ (
+ {
+ "date": DateTuple("1", "2", "3", "4", "5", "6"),
+ "time": TimeTuple("7", "8", "9", None),
+ },
+ DatetimeTuple(
+ DateTuple("1", "2", "3", "4", "5", "6"),
+ TimeTuple("7", "8", "9", None),
+ ),
+ ),
+ (
+ {
+ "date": DateTuple("1", "2", "3", "4", "5", "6"),
+ "time": TimeTuple(
+ "7", "8", "9", TimezoneTuple(True, False, "10", "11", "tz name")
+ ),
+ },
+ DatetimeTuple(
+ DateTuple("1", "2", "3", "4", "5", "6"),
+ TimeTuple(
+ "7", "8", "9", TimezoneTuple(True, False, "10", "11", "tz name")
+ ),
+ ),
+ ),
+ )
+
+ for testtuple in testtuples:
+ self.assertEqual(TupleBuilder.build_datetime(**testtuple[0]), testtuple[1])
+
+ def test_build_duration(self):
+ testtuples = (
+ ({}, DurationTuple(None, None, None, None, None, None, None)),
+ (
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": "3",
+ "PnD": "4",
+ "TnH": "5",
+ "TnM": "6",
+ "TnS": "7",
+ },
+ DurationTuple("1", "2", "3", "4", "5", "6", "7"),
+ ),
+ )
+
+ for testtuple in testtuples:
+ self.assertEqual(TupleBuilder.build_duration(**testtuple[0]), testtuple[1])
+
+ def test_build_interval(self):
+ testtuples = (
+ ({}, IntervalTuple(None, None, None)),
+ (
+ {
+ "start": DateTuple("1", "2", "3", "4", "5", "6"),
+ "end": DateTuple("7", "8", "9", "10", "11", "12"),
+ },
+ IntervalTuple(
+ DateTuple("1", "2", "3", "4", "5", "6"),
+ DateTuple("7", "8", "9", "10", "11", "12"),
+ None,
+ ),
+ ),
+ (
+ {
+ "start": TimeTuple(
+ "1", "2", "3", TimezoneTuple(True, False, "7", "8", "tz name")
+ ),
+ "end": TimeTuple(
+ "4", "5", "6", TimezoneTuple(False, False, "9", "10", "tz name")
+ ),
+ },
+ IntervalTuple(
+ TimeTuple(
+ "1", "2", "3", TimezoneTuple(True, False, "7", "8", "tz name")
+ ),
+ TimeTuple(
+ "4", "5", "6", TimezoneTuple(False, False, "9", "10", "tz name")
+ ),
+ None,
+ ),
+ ),
+ (
+ {
+ "start": DatetimeTuple(
+ DateTuple("1", "2", "3", "4", "5", "6"),
+ TimeTuple(
+ "7",
+ "8",
+ "9",
+ TimezoneTuple(True, False, "10", "11", "tz name"),
+ ),
+ ),
+ "end": DatetimeTuple(
+ DateTuple("12", "13", "14", "15", "16", "17"),
+ TimeTuple(
+ "18",
+ "19",
+ "20",
+ TimezoneTuple(False, False, "21", "22", "tz name"),
+ ),
+ ),
+ },
+ IntervalTuple(
+ DatetimeTuple(
+ DateTuple("1", "2", "3", "4", "5", "6"),
+ TimeTuple(
+ "7",
+ "8",
+ "9",
+ TimezoneTuple(True, False, "10", "11", "tz name"),
+ ),
+ ),
+ DatetimeTuple(
+ DateTuple("12", "13", "14", "15", "16", "17"),
+ TimeTuple(
+ "18",
+ "19",
+ "20",
+ TimezoneTuple(False, False, "21", "22", "tz name"),
+ ),
+ ),
+ None,
+ ),
+ ),
+ (
+ {
+ "start": DateTuple("1", "2", "3", "4", "5", "6"),
+ "end": None,
+ "duration": DurationTuple("7", "8", "9", "10", "11", "12", "13"),
+ },
+ IntervalTuple(
+ DateTuple("1", "2", "3", "4", "5", "6"),
+ None,
+ DurationTuple("7", "8", "9", "10", "11", "12", "13"),
+ ),
+ ),
+ (
+ {
+ "start": None,
+ "end": TimeTuple(
+ "1", "2", "3", TimezoneTuple(True, False, "4", "5", "tz name")
+ ),
+ "duration": DurationTuple("6", "7", "8", "9", "10", "11", "12"),
+ },
+ IntervalTuple(
+ None,
+ TimeTuple(
+ "1", "2", "3", TimezoneTuple(True, False, "4", "5", "tz name")
+ ),
+ DurationTuple("6", "7", "8", "9", "10", "11", "12"),
+ ),
+ ),
+ )
+
+ for testtuple in testtuples:
+ self.assertEqual(TupleBuilder.build_interval(**testtuple[0]), testtuple[1])
+
+ def test_build_repeating_interval(self):
+ testtuples = (
+ ({}, RepeatingIntervalTuple(None, None, None)),
+ (
+ {
+ "R": True,
+ "interval": IntervalTuple(
+ DateTuple("1", "2", "3", "4", "5", "6"),
+ DateTuple("7", "8", "9", "10", "11", "12"),
+ None,
+ ),
+ },
+ RepeatingIntervalTuple(
+ True,
+ None,
+ IntervalTuple(
+ DateTuple("1", "2", "3", "4", "5", "6"),
+ DateTuple("7", "8", "9", "10", "11", "12"),
+ None,
+ ),
+ ),
+ ),
+ (
+ {
+ "R": False,
+ "Rnn": "1",
+ "interval": IntervalTuple(
+ DatetimeTuple(
+ DateTuple("2", "3", "4", "5", "6", "7"),
+ TimeTuple("8", "9", "10", None),
+ ),
+ DatetimeTuple(
+ DateTuple("11", "12", "13", "14", "15", "16"),
+ TimeTuple("17", "18", "19", None),
+ ),
+ None,
+ ),
+ },
+ RepeatingIntervalTuple(
+ False,
+ "1",
+ IntervalTuple(
+ DatetimeTuple(
+ DateTuple("2", "3", "4", "5", "6", "7"),
+ TimeTuple("8", "9", "10", None),
+ ),
+ DatetimeTuple(
+ DateTuple("11", "12", "13", "14", "15", "16"),
+ TimeTuple("17", "18", "19", None),
+ ),
+ None,
+ ),
+ ),
+ ),
+ )
+
+ for testtuple in testtuples:
+ result = TupleBuilder.build_repeating_interval(**testtuple[0])
+ self.assertEqual(result, testtuple[1])
+
+ def test_build_timezone(self):
+ testtuples = (
+ ({}, TimezoneTuple(None, None, None, None, "")),
+ (
+ {"negative": False, "Z": True, "name": "UTC"},
+ TimezoneTuple(False, True, None, None, "UTC"),
+ ),
+ (
+ {"negative": False, "Z": False, "hh": "1", "mm": "2", "name": "+01:02"},
+ TimezoneTuple(False, False, "1", "2", "+01:02"),
+ ),
+ (
+ {"negative": True, "Z": False, "hh": "1", "mm": "2", "name": "-01:02"},
+ TimezoneTuple(True, False, "1", "2", "-01:02"),
+ ),
+ )
+
+ for testtuple in testtuples:
+ result = TupleBuilder.build_timezone(**testtuple[0])
+ self.assertEqual(result, testtuple[1])
diff --git a/libs/aniso8601/builders/tests/test_python.py b/libs/aniso8601/builders/tests/test_python.py
new file mode 100644
index 000000000..11111a163
--- /dev/null
+++ b/libs/aniso8601/builders/tests/test_python.py
@@ -0,0 +1,1710 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+import datetime
+import unittest
+
+from aniso8601 import compat
+from aniso8601.builders import (
+ DatetimeTuple,
+ DateTuple,
+ DurationTuple,
+ IntervalTuple,
+ Limit,
+ TimeTuple,
+ TimezoneTuple,
+)
+from aniso8601.builders.python import (
+ FractionalComponent,
+ PythonTimeBuilder,
+ _cast_to_fractional_component,
+ fractional_range_check,
+ year_range_check,
+)
+from aniso8601.exceptions import (
+ DayOutOfBoundsError,
+ HoursOutOfBoundsError,
+ ISOFormatError,
+ LeapSecondError,
+ MidnightBoundsError,
+ MinutesOutOfBoundsError,
+ MonthOutOfBoundsError,
+ SecondsOutOfBoundsError,
+ WeekOutOfBoundsError,
+ YearOutOfBoundsError,
+)
+from aniso8601.utcoffset import UTCOffset
+
+
+class TestPythonTimeBuilder_UtiltyFunctions(unittest.TestCase):
+ def test_year_range_check(self):
+ yearlimit = Limit(
+ "Invalid year string.",
+ 0000,
+ 9999,
+ YearOutOfBoundsError,
+ "Year must be between 1..9999.",
+ None,
+ )
+
+ self.assertEqual(year_range_check("1", yearlimit), 1000)
+
+ def test_fractional_range_check(self):
+ limit = Limit(
+ "Invalid string.", -1, 1, ValueError, "Value must be between -1..1.", None
+ )
+
+ self.assertEqual(fractional_range_check(10, "1", limit), 1)
+ self.assertEqual(fractional_range_check(10, "-1", limit), -1)
+ self.assertEqual(
+ fractional_range_check(10, "0.1", limit), FractionalComponent(0, 1)
+ )
+ self.assertEqual(
+ fractional_range_check(10, "-0.1", limit), FractionalComponent(-0, 1)
+ )
+
+ with self.assertRaises(ValueError):
+ fractional_range_check(10, "1.1", limit)
+
+ with self.assertRaises(ValueError):
+ fractional_range_check(10, "-1.1", limit)
+
+ def test_cast_to_fractional_component(self):
+ self.assertEqual(
+ _cast_to_fractional_component(10, "1.1"), FractionalComponent(1, 1)
+ )
+ self.assertEqual(
+ _cast_to_fractional_component(10, "-1.1"), FractionalComponent(-1, 1)
+ )
+
+ self.assertEqual(
+ _cast_to_fractional_component(100, "1.1"), FractionalComponent(1, 10)
+ )
+ self.assertEqual(
+ _cast_to_fractional_component(100, "-1.1"), FractionalComponent(-1, 10)
+ )
+
+
+class TestPythonTimeBuilder(unittest.TestCase):
+ def test_build_date(self):
+ testtuples = (
+ (
+ {
+ "YYYY": "2013",
+ "MM": None,
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": None,
+ },
+ datetime.date(2013, 1, 1),
+ ),
+ (
+ {
+ "YYYY": "0001",
+ "MM": None,
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": None,
+ },
+ datetime.date(1, 1, 1),
+ ),
+ (
+ {
+ "YYYY": "1900",
+ "MM": None,
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": None,
+ },
+ datetime.date(1900, 1, 1),
+ ),
+ (
+ {
+ "YYYY": "1981",
+ "MM": "04",
+ "DD": "05",
+ "Www": None,
+ "D": None,
+ "DDD": None,
+ },
+ datetime.date(1981, 4, 5),
+ ),
+ (
+ {
+ "YYYY": "1981",
+ "MM": "04",
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": None,
+ },
+ datetime.date(1981, 4, 1),
+ ),
+ (
+ {
+ "YYYY": "1981",
+ "MM": None,
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": "095",
+ },
+ datetime.date(1981, 4, 5),
+ ),
+ (
+ {
+ "YYYY": "1981",
+ "MM": None,
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": "365",
+ },
+ datetime.date(1981, 12, 31),
+ ),
+ (
+ {
+ "YYYY": "1980",
+ "MM": None,
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": "366",
+ },
+ datetime.date(1980, 12, 31),
+ ),
+ # Make sure we shift in zeros
+ (
+ {
+ "YYYY": "1",
+ "MM": None,
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": None,
+ },
+ datetime.date(1000, 1, 1),
+ ),
+ (
+ {
+ "YYYY": "12",
+ "MM": None,
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": None,
+ },
+ datetime.date(1200, 1, 1),
+ ),
+ (
+ {
+ "YYYY": "123",
+ "MM": None,
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": None,
+ },
+ datetime.date(1230, 1, 1),
+ ),
+ )
+
+ for testtuple in testtuples:
+ result = PythonTimeBuilder.build_date(**testtuple[0])
+ self.assertEqual(result, testtuple[1])
+
+ # Test weekday
+ testtuples = (
+ (
+ {
+ "YYYY": "2004",
+ "MM": None,
+ "DD": None,
+ "Www": "53",
+ "D": None,
+ "DDD": None,
+ },
+ datetime.date(2004, 12, 27),
+ 0,
+ ),
+ (
+ {
+ "YYYY": "2009",
+ "MM": None,
+ "DD": None,
+ "Www": "01",
+ "D": None,
+ "DDD": None,
+ },
+ datetime.date(2008, 12, 29),
+ 0,
+ ),
+ (
+ {
+ "YYYY": "2010",
+ "MM": None,
+ "DD": None,
+ "Www": "01",
+ "D": None,
+ "DDD": None,
+ },
+ datetime.date(2010, 1, 4),
+ 0,
+ ),
+ (
+ {
+ "YYYY": "2009",
+ "MM": None,
+ "DD": None,
+ "Www": "53",
+ "D": None,
+ "DDD": None,
+ },
+ datetime.date(2009, 12, 28),
+ 0,
+ ),
+ (
+ {
+ "YYYY": "2009",
+ "MM": None,
+ "DD": None,
+ "Www": "01",
+ "D": "1",
+ "DDD": None,
+ },
+ datetime.date(2008, 12, 29),
+ 0,
+ ),
+ (
+ {
+ "YYYY": "2009",
+ "MM": None,
+ "DD": None,
+ "Www": "53",
+ "D": "7",
+ "DDD": None,
+ },
+ datetime.date(2010, 1, 3),
+ 6,
+ ),
+ (
+ {
+ "YYYY": "2010",
+ "MM": None,
+ "DD": None,
+ "Www": "01",
+ "D": "1",
+ "DDD": None,
+ },
+ datetime.date(2010, 1, 4),
+ 0,
+ ),
+ (
+ {
+ "YYYY": "2004",
+ "MM": None,
+ "DD": None,
+ "Www": "53",
+ "D": "6",
+ "DDD": None,
+ },
+ datetime.date(2005, 1, 1),
+ 5,
+ ),
+ )
+
+ for testtuple in testtuples:
+ result = PythonTimeBuilder.build_date(**testtuple[0])
+ self.assertEqual(result, testtuple[1])
+ self.assertEqual(result.weekday(), testtuple[2])
+
+ def test_build_time(self):
+ testtuples = (
+ ({}, datetime.time()),
+ ({"hh": "12.5"}, datetime.time(hour=12, minute=30)),
+ (
+ {"hh": "23.99999999997"},
+ datetime.time(hour=23, minute=59, second=59, microsecond=999999),
+ ),
+ ({"hh": "1", "mm": "23"}, datetime.time(hour=1, minute=23)),
+ (
+ {"hh": "1", "mm": "23.4567"},
+ datetime.time(hour=1, minute=23, second=27, microsecond=402000),
+ ),
+ (
+ {"hh": "14", "mm": "43.999999997"},
+ datetime.time(hour=14, minute=43, second=59, microsecond=999999),
+ ),
+ (
+ {"hh": "1", "mm": "23", "ss": "45"},
+ datetime.time(hour=1, minute=23, second=45),
+ ),
+ (
+ {"hh": "23", "mm": "21", "ss": "28.512400"},
+ datetime.time(hour=23, minute=21, second=28, microsecond=512400),
+ ),
+ (
+ {"hh": "01", "mm": "03", "ss": "11.858714"},
+ datetime.time(hour=1, minute=3, second=11, microsecond=858714),
+ ),
+ (
+ {"hh": "14", "mm": "43", "ss": "59.9999997"},
+ datetime.time(hour=14, minute=43, second=59, microsecond=999999),
+ ),
+ ({"hh": "24"}, datetime.time(hour=0)),
+ ({"hh": "24", "mm": "00"}, datetime.time(hour=0)),
+ ({"hh": "24", "mm": "00", "ss": "00"}, datetime.time(hour=0)),
+ (
+ {"tz": TimezoneTuple(False, None, "00", "00", "UTC")},
+ datetime.time(tzinfo=UTCOffset(name="UTC", minutes=0)),
+ ),
+ (
+ {
+ "hh": "23",
+ "mm": "21",
+ "ss": "28.512400",
+ "tz": TimezoneTuple(False, None, "00", "00", "+00:00"),
+ },
+ datetime.time(
+ hour=23,
+ minute=21,
+ second=28,
+ microsecond=512400,
+ tzinfo=UTCOffset(name="+00:00", minutes=0),
+ ),
+ ),
+ (
+ {
+ "hh": "1",
+ "mm": "23",
+ "tz": TimezoneTuple(False, None, "01", "00", "+1"),
+ },
+ datetime.time(
+ hour=1, minute=23, tzinfo=UTCOffset(name="+1", minutes=60)
+ ),
+ ),
+ (
+ {
+ "hh": "1",
+ "mm": "23.4567",
+ "tz": TimezoneTuple(True, None, "01", "00", "-1"),
+ },
+ datetime.time(
+ hour=1,
+ minute=23,
+ second=27,
+ microsecond=402000,
+ tzinfo=UTCOffset(name="-1", minutes=-60),
+ ),
+ ),
+ (
+ {
+ "hh": "23",
+ "mm": "21",
+ "ss": "28.512400",
+ "tz": TimezoneTuple(False, None, "01", "30", "+1:30"),
+ },
+ datetime.time(
+ hour=23,
+ minute=21,
+ second=28,
+ microsecond=512400,
+ tzinfo=UTCOffset(name="+1:30", minutes=90),
+ ),
+ ),
+ (
+ {
+ "hh": "23",
+ "mm": "21",
+ "ss": "28.512400",
+ "tz": TimezoneTuple(False, None, "11", "15", "+11:15"),
+ },
+ datetime.time(
+ hour=23,
+ minute=21,
+ second=28,
+ microsecond=512400,
+ tzinfo=UTCOffset(name="+11:15", minutes=675),
+ ),
+ ),
+ (
+ {
+ "hh": "23",
+ "mm": "21",
+ "ss": "28.512400",
+ "tz": TimezoneTuple(False, None, "12", "34", "+12:34"),
+ },
+ datetime.time(
+ hour=23,
+ minute=21,
+ second=28,
+ microsecond=512400,
+ tzinfo=UTCOffset(name="+12:34", minutes=754),
+ ),
+ ),
+ (
+ {
+ "hh": "23",
+ "mm": "21",
+ "ss": "28.512400",
+ "tz": TimezoneTuple(False, None, "00", "00", "UTC"),
+ },
+ datetime.time(
+ hour=23,
+ minute=21,
+ second=28,
+ microsecond=512400,
+ tzinfo=UTCOffset(name="UTC", minutes=0),
+ ),
+ ),
+ # Make sure we truncate, not round
+ # https://bitbucket.org/nielsenb/aniso8601/issues/10/sub-microsecond-precision-in-durations-is
+ # https://bitbucket.org/nielsenb/aniso8601/issues/21/sub-microsecond-precision-is-lost-when
+ (
+ {"hh": "14.9999999999999999"},
+ datetime.time(hour=14, minute=59, second=59, microsecond=999999),
+ ),
+ ({"mm": "0.00000000999"}, datetime.time()),
+ ({"mm": "0.0000000999"}, datetime.time(microsecond=5)),
+ ({"ss": "0.0000001"}, datetime.time()),
+ ({"ss": "2.0000048"}, datetime.time(second=2, microsecond=4)),
+ )
+
+ for testtuple in testtuples:
+ result = PythonTimeBuilder.build_time(**testtuple[0])
+ self.assertEqual(result, testtuple[1])
+
+ def test_build_datetime(self):
+ testtuples = (
+ (
+ (
+ DateTuple("2019", "06", "05", None, None, None),
+ TimeTuple("01", "03", "11.858714", None),
+ ),
+ datetime.datetime(
+ 2019, 6, 5, hour=1, minute=3, second=11, microsecond=858714
+ ),
+ ),
+ (
+ (
+ DateTuple("1234", "02", "03", None, None, None),
+ TimeTuple("23", "21", "28.512400", None),
+ ),
+ datetime.datetime(
+ 1234, 2, 3, hour=23, minute=21, second=28, microsecond=512400
+ ),
+ ),
+ (
+ (
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple(
+ "23",
+ "21",
+ "28.512400",
+ TimezoneTuple(False, None, "11", "15", "+11:15"),
+ ),
+ ),
+ datetime.datetime(
+ 1981,
+ 4,
+ 5,
+ hour=23,
+ minute=21,
+ second=28,
+ microsecond=512400,
+ tzinfo=UTCOffset(name="+11:15", minutes=675),
+ ),
+ ),
+ )
+
+ for testtuple in testtuples:
+ result = PythonTimeBuilder.build_datetime(*testtuple[0])
+ self.assertEqual(result, testtuple[1])
+
+ def test_build_duration(self):
+ testtuples = (
+ (
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6",
+ },
+ datetime.timedelta(days=428, hours=4, minutes=54, seconds=6),
+ ),
+ (
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6.5",
+ },
+ datetime.timedelta(days=428, hours=4, minutes=54, seconds=6.5),
+ ),
+ ({"PnY": "1", "PnM": "2", "PnD": "3"}, datetime.timedelta(days=428)),
+ ({"PnY": "1", "PnM": "2", "PnD": "3.5"}, datetime.timedelta(days=428.5)),
+ (
+ {"TnH": "4", "TnM": "54", "TnS": "6.5"},
+ datetime.timedelta(hours=4, minutes=54, seconds=6.5),
+ ),
+ (
+ {"TnH": "1", "TnM": "3", "TnS": "11.858714"},
+ datetime.timedelta(hours=1, minutes=3, seconds=11, microseconds=858714),
+ ),
+ (
+ {"TnH": "4", "TnM": "54", "TnS": "28.512400"},
+ datetime.timedelta(
+ hours=4, minutes=54, seconds=28, microseconds=512400
+ ),
+ ),
+ # Make sure we truncate, not round
+ # https://bitbucket.org/nielsenb/aniso8601/issues/10/sub-microsecond-precision-in-durations-is
+ # https://bitbucket.org/nielsenb/aniso8601/issues/21/sub-microsecond-precision-is-lost-when
+ (
+ {"PnY": "1999.9999999999999999"},
+ datetime.timedelta(days=729999, seconds=86399, microseconds=999999),
+ ),
+ (
+ {"PnM": "1.9999999999999999"},
+ datetime.timedelta(
+ days=59, hours=23, minutes=59, seconds=59, microseconds=999999
+ ),
+ ),
+ (
+ {"PnW": "1.9999999999999999"},
+ datetime.timedelta(
+ days=13, hours=23, minutes=59, seconds=59, microseconds=999999
+ ),
+ ),
+ (
+ {"PnD": "1.9999999999999999"},
+ datetime.timedelta(
+ days=1, hours=23, minutes=59, seconds=59, microseconds=999999
+ ),
+ ),
+ (
+ {"TnH": "14.9999999999999999"},
+ datetime.timedelta(
+ hours=14, minutes=59, seconds=59, microseconds=999999
+ ),
+ ),
+ ({"TnM": "0.00000000999"}, datetime.timedelta(0)),
+ ({"TnM": "0.0000000999"}, datetime.timedelta(microseconds=5)),
+ ({"TnS": "0.0000001"}, datetime.timedelta(0)),
+ ({"TnS": "2.0000048"}, datetime.timedelta(seconds=2, microseconds=4)),
+ ({"PnY": "1"}, datetime.timedelta(days=365)),
+ ({"PnY": "1.5"}, datetime.timedelta(days=547.5)),
+ ({"PnM": "1"}, datetime.timedelta(days=30)),
+ ({"PnM": "1.5"}, datetime.timedelta(days=45)),
+ ({"PnW": "1"}, datetime.timedelta(days=7)),
+ ({"PnW": "1.5"}, datetime.timedelta(days=10.5)),
+ ({"PnD": "1"}, datetime.timedelta(days=1)),
+ ({"PnD": "1.5"}, datetime.timedelta(days=1.5)),
+ (
+ {
+ "PnY": "0003",
+ "PnM": "06",
+ "PnD": "04",
+ "TnH": "12",
+ "TnM": "30",
+ "TnS": "05",
+ },
+ datetime.timedelta(days=1279, hours=12, minutes=30, seconds=5),
+ ),
+ (
+ {
+ "PnY": "0003",
+ "PnM": "06",
+ "PnD": "04",
+ "TnH": "12",
+ "TnM": "30",
+ "TnS": "05.5",
+ },
+ datetime.timedelta(days=1279, hours=12, minutes=30, seconds=5.5),
+ ),
+ # Test timedelta limit
+ (
+ {"PnD": "999999999", "TnH": "23", "TnM": "59", "TnS": "59.999999"},
+ datetime.timedelta.max,
+ ),
+ # Make sure we truncate, not round
+ # https://bitbucket.org/nielsenb/aniso8601/issues/10/sub-microsecond-precision-in-durations-is
+ (
+ {
+ "PnY": "0001",
+ "PnM": "02",
+ "PnD": "03",
+ "TnH": "14",
+ "TnM": "43",
+ "TnS": "59.9999997",
+ },
+ datetime.timedelta(
+ days=428, hours=14, minutes=43, seconds=59, microseconds=999999
+ ),
+ ),
+ # Verify overflows
+ ({"TnH": "36"}, datetime.timedelta(days=1, hours=12)),
+ )
+
+ for testtuple in testtuples:
+ result = PythonTimeBuilder.build_duration(**testtuple[0])
+ self.assertEqual(result, testtuple[1])
+
+ def test_build_interval(self):
+ testtuples = (
+ (
+ {
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "duration": DurationTuple(None, "1", None, None, None, None, None),
+ },
+ datetime.datetime(year=1981, month=4, day=5, hour=1, minute=1),
+ datetime.datetime(year=1981, month=3, day=6, hour=1, minute=1),
+ ),
+ (
+ {
+ "end": DateTuple("1981", "04", "05", None, None, None),
+ "duration": DurationTuple(None, "1", None, None, None, None, None),
+ },
+ datetime.date(year=1981, month=4, day=5),
+ datetime.date(year=1981, month=3, day=6),
+ ),
+ (
+ {
+ "end": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ "1.5", None, None, None, None, None, None
+ ),
+ },
+ datetime.date(year=2018, month=3, day=6),
+ datetime.datetime(year=2016, month=9, day=4, hour=12),
+ ),
+ (
+ {
+ "end": DateTuple("2014", "11", "12", None, None, None),
+ "duration": DurationTuple(None, None, None, None, "1", None, None),
+ },
+ datetime.date(year=2014, month=11, day=12),
+ datetime.datetime(year=2014, month=11, day=11, hour=23),
+ ),
+ (
+ {
+ "end": DateTuple("2014", "11", "12", None, None, None),
+ "duration": DurationTuple(None, None, None, None, "4", "54", "6.5"),
+ },
+ datetime.date(year=2014, month=11, day=12),
+ datetime.datetime(
+ year=2014,
+ month=11,
+ day=11,
+ hour=19,
+ minute=5,
+ second=53,
+ microsecond=500000,
+ ),
+ ),
+ (
+ {
+ "end": DatetimeTuple(
+ DateTuple("2050", "03", "01", None, None, None),
+ TimeTuple(
+ "13",
+ "00",
+ "00",
+ TimezoneTuple(False, True, None, None, "Z"),
+ ),
+ ),
+ "duration": DurationTuple(None, None, None, None, "10", None, None),
+ },
+ datetime.datetime(
+ year=2050,
+ month=3,
+ day=1,
+ hour=13,
+ tzinfo=UTCOffset(name="UTC", minutes=0),
+ ),
+ datetime.datetime(
+ year=2050,
+ month=3,
+ day=1,
+ hour=3,
+ tzinfo=UTCOffset(name="UTC", minutes=0),
+ ),
+ ),
+ # Make sure we truncate, not round
+ # https://bitbucket.org/nielsenb/aniso8601/issues/10/sub-microsecond-precision-in-durations-is
+ # https://bitbucket.org/nielsenb/aniso8601/issues/21/sub-microsecond-precision-is-lost-when
+ (
+ {
+ "end": DateTuple("2000", "01", "01", None, None, None),
+ "duration": DurationTuple(
+ "1999.9999999999999999", None, None, None, None, None, None
+ ),
+ },
+ datetime.date(year=2000, month=1, day=1),
+ datetime.datetime(
+ year=1, month=4, day=30, hour=0, minute=0, second=0, microsecond=1
+ ),
+ ),
+ (
+ {
+ "end": DateTuple("1989", "03", "01", None, None, None),
+ "duration": DurationTuple(
+ None, "1.9999999999999999", None, None, None, None, None
+ ),
+ },
+ datetime.date(year=1989, month=3, day=1),
+ datetime.datetime(
+ year=1988,
+ month=12,
+ day=31,
+ hour=0,
+ minute=0,
+ second=0,
+ microsecond=1,
+ ),
+ ),
+ (
+ {
+ "end": DateTuple("1989", "03", "01", None, None, None),
+ "duration": DurationTuple(
+ None, None, "1.9999999999999999", None, None, None, None
+ ),
+ },
+ datetime.date(year=1989, month=3, day=1),
+ datetime.datetime(
+ year=1989,
+ month=2,
+ day=15,
+ hour=0,
+ minute=0,
+ second=0,
+ microsecond=1,
+ ),
+ ),
+ (
+ {
+ "end": DateTuple("1989", "03", "01", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, "1.9999999999999999", None, None, None
+ ),
+ },
+ datetime.date(year=1989, month=3, day=1),
+ datetime.datetime(
+ year=1989,
+ month=2,
+ day=27,
+ hour=0,
+ minute=0,
+ second=0,
+ microsecond=1,
+ ),
+ ),
+ (
+ {
+ "end": DateTuple("2001", "01", "01", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, "14.9999999999999999", None, None
+ ),
+ },
+ datetime.date(year=2001, month=1, day=1),
+ datetime.datetime(
+ year=2000,
+ month=12,
+ day=31,
+ hour=9,
+ minute=0,
+ second=0,
+ microsecond=1,
+ ),
+ ),
+ (
+ {
+ "end": DateTuple("2001", "01", "01", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, None, "0.00000000999", None
+ ),
+ },
+ datetime.date(year=2001, month=1, day=1),
+ datetime.datetime(year=2001, month=1, day=1),
+ ),
+ (
+ {
+ "end": DateTuple("2001", "01", "01", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, None, "0.0000000999", None
+ ),
+ },
+ datetime.date(year=2001, month=1, day=1),
+ datetime.datetime(
+ year=2000,
+ month=12,
+ day=31,
+ hour=23,
+ minute=59,
+ second=59,
+ microsecond=999995,
+ ),
+ ),
+ (
+ {
+ "end": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, None, None, "0.0000001"
+ ),
+ },
+ datetime.date(year=2018, month=3, day=6),
+ datetime.datetime(year=2018, month=3, day=6),
+ ),
+ (
+ {
+ "end": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, None, None, "2.0000048"
+ ),
+ },
+ datetime.date(year=2018, month=3, day=6),
+ datetime.datetime(
+ year=2018,
+ month=3,
+ day=5,
+ hour=23,
+ minute=59,
+ second=57,
+ microsecond=999996,
+ ),
+ ),
+ (
+ {
+ "start": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "duration": DurationTuple(None, "1", None, "1", None, "1", None),
+ },
+ datetime.datetime(year=1981, month=4, day=5, hour=1, minute=1),
+ datetime.datetime(year=1981, month=5, day=6, hour=1, minute=2),
+ ),
+ (
+ {
+ "start": DateTuple("1981", "04", "05", None, None, None),
+ "duration": DurationTuple(None, "1", None, "1", None, None, None),
+ },
+ datetime.date(year=1981, month=4, day=5),
+ datetime.date(year=1981, month=5, day=6),
+ ),
+ (
+ {
+ "start": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, "2.5", None, None, None, None, None
+ ),
+ },
+ datetime.date(year=2018, month=3, day=6),
+ datetime.date(year=2018, month=5, day=20),
+ ),
+ (
+ {
+ "start": DateTuple("2014", "11", "12", None, None, None),
+ "duration": DurationTuple(None, None, None, None, "1", None, None),
+ },
+ datetime.date(year=2014, month=11, day=12),
+ datetime.datetime(year=2014, month=11, day=12, hour=1, minute=0),
+ ),
+ (
+ {
+ "start": DateTuple("2014", "11", "12", None, None, None),
+ "duration": DurationTuple(None, None, None, None, "4", "54", "6.5"),
+ },
+ datetime.date(year=2014, month=11, day=12),
+ datetime.datetime(
+ year=2014,
+ month=11,
+ day=12,
+ hour=4,
+ minute=54,
+ second=6,
+ microsecond=500000,
+ ),
+ ),
+ (
+ {
+ "start": DatetimeTuple(
+ DateTuple("2050", "03", "01", None, None, None),
+ TimeTuple(
+ "13",
+ "00",
+ "00",
+ TimezoneTuple(False, True, None, None, "Z"),
+ ),
+ ),
+ "duration": DurationTuple(None, None, None, None, "10", None, None),
+ },
+ datetime.datetime(
+ year=2050,
+ month=3,
+ day=1,
+ hour=13,
+ tzinfo=UTCOffset(name="UTC", minutes=0),
+ ),
+ datetime.datetime(
+ year=2050,
+ month=3,
+ day=1,
+ hour=23,
+ tzinfo=UTCOffset(name="UTC", minutes=0),
+ ),
+ ),
+ # Make sure we truncate, not round
+ # https://bitbucket.org/nielsenb/aniso8601/issues/10/sub-microsecond-precision-in-durations-is
+ (
+ {
+ "start": DateTuple("0001", "01", "01", None, None, None),
+ "duration": DurationTuple(
+ "1999.9999999999999999", None, None, None, None, None, None
+ ),
+ },
+ datetime.date(year=1, month=1, day=1),
+ datetime.datetime(
+ year=1999,
+ month=9,
+ day=3,
+ hour=23,
+ minute=59,
+ second=59,
+ microsecond=999999,
+ ),
+ ),
+ (
+ {
+ "start": DateTuple("1989", "03", "01", None, None, None),
+ "duration": DurationTuple(
+ None, "1.9999999999999999", None, None, None, None, None
+ ),
+ },
+ datetime.date(year=1989, month=3, day=1),
+ datetime.datetime(
+ year=1989,
+ month=4,
+ day=29,
+ hour=23,
+ minute=59,
+ second=59,
+ microsecond=999999,
+ ),
+ ),
+ (
+ {
+ "start": DateTuple("1989", "03", "01", None, None, None),
+ "duration": DurationTuple(
+ None, None, "1.9999999999999999", None, None, None, None
+ ),
+ },
+ datetime.date(year=1989, month=3, day=1),
+ datetime.datetime(
+ year=1989,
+ month=3,
+ day=14,
+ hour=23,
+ minute=59,
+ second=59,
+ microsecond=999999,
+ ),
+ ),
+ (
+ {
+ "start": DateTuple("1989", "03", "01", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, "1.9999999999999999", None, None, None
+ ),
+ },
+ datetime.date(year=1989, month=3, day=1),
+ datetime.datetime(
+ year=1989,
+ month=3,
+ day=2,
+ hour=23,
+ minute=59,
+ second=59,
+ microsecond=999999,
+ ),
+ ),
+ (
+ {
+ "start": DateTuple("2001", "01", "01", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, "14.9999999999999999", None, None
+ ),
+ },
+ datetime.date(year=2001, month=1, day=1),
+ datetime.datetime(
+ year=2001,
+ month=1,
+ day=1,
+ hour=14,
+ minute=59,
+ second=59,
+ microsecond=999999,
+ ),
+ ),
+ (
+ {
+ "start": DateTuple("2001", "01", "01", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, None, "0.00000000999", None
+ ),
+ },
+ datetime.date(year=2001, month=1, day=1),
+ datetime.datetime(year=2001, month=1, day=1),
+ ),
+ (
+ {
+ "start": DateTuple("2001", "01", "01", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, None, "0.0000000999", None
+ ),
+ },
+ datetime.date(year=2001, month=1, day=1),
+ datetime.datetime(
+ year=2001, month=1, day=1, hour=0, minute=0, second=0, microsecond=5
+ ),
+ ),
+ (
+ {
+ "start": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, None, None, "0.0000001"
+ ),
+ },
+ datetime.date(year=2018, month=3, day=6),
+ datetime.datetime(year=2018, month=3, day=6),
+ ),
+ (
+ {
+ "start": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, None, None, "2.0000048"
+ ),
+ },
+ datetime.date(year=2018, month=3, day=6),
+ datetime.datetime(
+ year=2018, month=3, day=6, hour=0, minute=0, second=2, microsecond=4
+ ),
+ ),
+ (
+ {
+ "start": DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ },
+ datetime.datetime(year=1980, month=3, day=5, hour=1, minute=1),
+ datetime.datetime(year=1981, month=4, day=5, hour=1, minute=1),
+ ),
+ (
+ {
+ "start": DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "end": DateTuple("1981", "04", "05", None, None, None),
+ },
+ datetime.datetime(year=1980, month=3, day=5, hour=1, minute=1),
+ datetime.date(year=1981, month=4, day=5),
+ ),
+ (
+ {
+ "start": DateTuple("1980", "03", "05", None, None, None),
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ },
+ datetime.date(year=1980, month=3, day=5),
+ datetime.datetime(year=1981, month=4, day=5, hour=1, minute=1),
+ ),
+ (
+ {
+ "start": DateTuple("1980", "03", "05", None, None, None),
+ "end": DateTuple("1981", "04", "05", None, None, None),
+ },
+ datetime.date(year=1980, month=3, day=5),
+ datetime.date(year=1981, month=4, day=5),
+ ),
+ (
+ {
+ "start": DateTuple("1981", "04", "05", None, None, None),
+ "end": DateTuple("1980", "03", "05", None, None, None),
+ },
+ datetime.date(year=1981, month=4, day=5),
+ datetime.date(year=1980, month=3, day=5),
+ ),
+ (
+ {
+ "start": DatetimeTuple(
+ DateTuple("2050", "03", "01", None, None, None),
+ TimeTuple(
+ "13",
+ "00",
+ "00",
+ TimezoneTuple(False, True, None, None, "Z"),
+ ),
+ ),
+ "end": DatetimeTuple(
+ DateTuple("2050", "05", "11", None, None, None),
+ TimeTuple(
+ "15",
+ "30",
+ "00",
+ TimezoneTuple(False, True, None, None, "Z"),
+ ),
+ ),
+ },
+ datetime.datetime(
+ year=2050,
+ month=3,
+ day=1,
+ hour=13,
+ tzinfo=UTCOffset(name="UTC", minutes=0),
+ ),
+ datetime.datetime(
+ year=2050,
+ month=5,
+ day=11,
+ hour=15,
+ minute=30,
+ tzinfo=UTCOffset(name="UTC", minutes=0),
+ ),
+ ),
+ # Test concise representation
+ (
+ {
+ "start": DateTuple("2020", "01", "01", None, None, None),
+ "end": DateTuple(None, None, "02", None, None, None),
+ },
+ datetime.date(year=2020, month=1, day=1),
+ datetime.date(year=2020, month=1, day=2),
+ ),
+ (
+ {
+ "start": DateTuple("2008", "02", "15", None, None, None),
+ "end": DateTuple(None, "03", "14", None, None, None),
+ },
+ datetime.date(year=2008, month=2, day=15),
+ datetime.date(year=2008, month=3, day=14),
+ ),
+ (
+ {
+ "start": DatetimeTuple(
+ DateTuple("2007", "12", "14", None, None, None),
+ TimeTuple("13", "30", None, None),
+ ),
+ "end": TimeTuple("15", "30", None, None),
+ },
+ datetime.datetime(year=2007, month=12, day=14, hour=13, minute=30),
+ datetime.datetime(year=2007, month=12, day=14, hour=15, minute=30),
+ ),
+ (
+ {
+ "start": DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple("09", "00", None, None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple(None, None, "15", None, None, None),
+ TimeTuple("17", "00", None, None),
+ ),
+ },
+ datetime.datetime(year=2007, month=11, day=13, hour=9),
+ datetime.datetime(year=2007, month=11, day=15, hour=17),
+ ),
+ (
+ {
+ "start": DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple("00", "00", None, None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple(None, None, "16", None, None, None),
+ TimeTuple("00", "00", None, None),
+ ),
+ },
+ datetime.datetime(year=2007, month=11, day=13),
+ datetime.datetime(year=2007, month=11, day=16),
+ ),
+ (
+ {
+ "start": DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple(
+ "09",
+ "00",
+ None,
+ TimezoneTuple(False, True, None, None, "Z"),
+ ),
+ ),
+ "end": DatetimeTuple(
+ DateTuple(None, None, "15", None, None, None),
+ TimeTuple("17", "00", None, None),
+ ),
+ },
+ datetime.datetime(
+ year=2007,
+ month=11,
+ day=13,
+ hour=9,
+ tzinfo=UTCOffset(name="UTC", minutes=0),
+ ),
+ datetime.datetime(
+ year=2007,
+ month=11,
+ day=15,
+ hour=17,
+ tzinfo=UTCOffset(name="UTC", minutes=0),
+ ),
+ ),
+ (
+ {
+ "start": DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple("09", "00", None, None),
+ ),
+ "end": TimeTuple("12", "34.567", None, None),
+ },
+ datetime.datetime(year=2007, month=11, day=13, hour=9),
+ datetime.datetime(
+ year=2007,
+ month=11,
+ day=13,
+ hour=12,
+ minute=34,
+ second=34,
+ microsecond=20000,
+ ),
+ ),
+ (
+ {
+ "start": DateTuple("2007", "11", "13", None, None, None),
+ "end": TimeTuple("12", "34", None, None),
+ },
+ datetime.date(year=2007, month=11, day=13),
+ datetime.datetime(year=2007, month=11, day=13, hour=12, minute=34),
+ ),
+ # Make sure we truncate, not round
+ # https://bitbucket.org/nielsenb/aniso8601/issues/10/sub-microsecond-precision-in-durations-is
+ (
+ {
+ "start": DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00.0000001", None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("14", "43", "59.9999997", None),
+ ),
+ },
+ datetime.datetime(year=1980, month=3, day=5, hour=1, minute=1),
+ datetime.datetime(
+ year=1981,
+ month=4,
+ day=5,
+ hour=14,
+ minute=43,
+ second=59,
+ microsecond=999999,
+ ),
+ ),
+ )
+
+ for testtuple in testtuples:
+ result = PythonTimeBuilder.build_interval(**testtuple[0])
+ self.assertEqual(result[0], testtuple[1])
+ self.assertEqual(result[1], testtuple[2])
+
+ def test_build_repeating_interval(self):
+ args = {
+ "Rnn": "3",
+ "interval": IntervalTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ None,
+ DurationTuple(None, None, None, "1", None, None, None),
+ ),
+ }
+ results = list(PythonTimeBuilder.build_repeating_interval(**args))
+
+ self.assertEqual(results[0], datetime.date(year=1981, month=4, day=5))
+ self.assertEqual(results[1], datetime.date(year=1981, month=4, day=6))
+ self.assertEqual(results[2], datetime.date(year=1981, month=4, day=7))
+
+ args = {
+ "Rnn": "11",
+ "interval": IntervalTuple(
+ None,
+ DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ DurationTuple(None, None, None, None, "1", "2", None),
+ ),
+ }
+ results = list(PythonTimeBuilder.build_repeating_interval(**args))
+
+ for dateindex in compat.range(0, 11):
+ self.assertEqual(
+ results[dateindex],
+ datetime.datetime(year=1980, month=3, day=5, hour=1, minute=1)
+ - dateindex * datetime.timedelta(hours=1, minutes=2),
+ )
+
+ args = {
+ "Rnn": "2",
+ "interval": IntervalTuple(
+ DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ None,
+ ),
+ }
+ results = list(PythonTimeBuilder.build_repeating_interval(**args))
+
+ self.assertEqual(
+ results[0], datetime.datetime(year=1980, month=3, day=5, hour=1, minute=1)
+ )
+ self.assertEqual(
+ results[1], datetime.datetime(year=1981, month=4, day=5, hour=1, minute=1)
+ )
+
+ args = {
+ "Rnn": "2",
+ "interval": IntervalTuple(
+ DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ None,
+ ),
+ }
+ results = list(PythonTimeBuilder.build_repeating_interval(**args))
+
+ self.assertEqual(
+ results[0], datetime.datetime(year=1980, month=3, day=5, hour=1, minute=1)
+ )
+ self.assertEqual(
+ results[1], datetime.datetime(year=1981, month=4, day=5, hour=1, minute=1)
+ )
+
+ args = {
+ "R": True,
+ "interval": IntervalTuple(
+ None,
+ DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ DurationTuple(None, None, None, None, "1", "2", None),
+ ),
+ }
+ resultgenerator = PythonTimeBuilder.build_repeating_interval(**args)
+
+ # Test the first 11 generated
+ for dateindex in compat.range(0, 11):
+ self.assertEqual(
+ next(resultgenerator),
+ datetime.datetime(year=1980, month=3, day=5, hour=1, minute=1)
+ - dateindex * datetime.timedelta(hours=1, minutes=2),
+ )
+
+ args = {
+ "R": True,
+ "interval": IntervalTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ None,
+ DurationTuple(None, None, None, "1", None, None, None),
+ ),
+ }
+ resultgenerator = PythonTimeBuilder.build_repeating_interval(**args)
+
+ # Test the first 11 generated
+ for dateindex in compat.range(0, 11):
+ self.assertEqual(
+ next(resultgenerator),
+ (
+ datetime.datetime(year=1981, month=4, day=5, hour=0, minute=0)
+ + dateindex * datetime.timedelta(days=1)
+ ).date(),
+ )
+
+ def test_build_timezone(self):
+ testtuples = (
+ ({"Z": True, "name": "Z"}, datetime.timedelta(hours=0), "UTC"),
+ (
+ {"negative": False, "hh": "00", "mm": "00", "name": "+00:00"},
+ datetime.timedelta(hours=0),
+ "+00:00",
+ ),
+ (
+ {"negative": False, "hh": "01", "mm": "00", "name": "+01:00"},
+ datetime.timedelta(hours=1),
+ "+01:00",
+ ),
+ (
+ {"negative": True, "hh": "01", "mm": "00", "name": "-01:00"},
+ -datetime.timedelta(hours=1),
+ "-01:00",
+ ),
+ (
+ {"negative": False, "hh": "00", "mm": "12", "name": "+00:12"},
+ datetime.timedelta(minutes=12),
+ "+00:12",
+ ),
+ (
+ {"negative": False, "hh": "01", "mm": "23", "name": "+01:23"},
+ datetime.timedelta(hours=1, minutes=23),
+ "+01:23",
+ ),
+ (
+ {"negative": True, "hh": "01", "mm": "23", "name": "-01:23"},
+ -datetime.timedelta(hours=1, minutes=23),
+ "-01:23",
+ ),
+ (
+ {"negative": False, "hh": "00", "name": "+00"},
+ datetime.timedelta(hours=0),
+ "+00",
+ ),
+ (
+ {"negative": False, "hh": "01", "name": "+01"},
+ datetime.timedelta(hours=1),
+ "+01",
+ ),
+ (
+ {"negative": True, "hh": "01", "name": "-01"},
+ -datetime.timedelta(hours=1),
+ "-01",
+ ),
+ (
+ {"negative": False, "hh": "12", "name": "+12"},
+ datetime.timedelta(hours=12),
+ "+12",
+ ),
+ (
+ {"negative": True, "hh": "12", "name": "-12"},
+ -datetime.timedelta(hours=12),
+ "-12",
+ ),
+ )
+
+ for testtuple in testtuples:
+ result = PythonTimeBuilder.build_timezone(**testtuple[0])
+ self.assertEqual(result.utcoffset(None), testtuple[1])
+ self.assertEqual(result.tzname(None), testtuple[2])
+
+ def test_range_check_date(self):
+ # 0 isn't a valid year for a Python builder
+ with self.assertRaises(YearOutOfBoundsError):
+ PythonTimeBuilder.build_date(YYYY="0000")
+
+ # Leap year
+ # https://bitbucket.org/nielsenb/aniso8601/issues/14/parsing-ordinal-dates-should-only-allow
+ with self.assertRaises(DayOutOfBoundsError):
+ PythonTimeBuilder.build_date(YYYY="1981", DDD="366")
+
+ def test_range_check_time(self):
+ # Hour 24 can only represent midnight
+ with self.assertRaises(MidnightBoundsError):
+ PythonTimeBuilder.build_time(hh="24", mm="00", ss="01")
+
+ with self.assertRaises(MidnightBoundsError):
+ PythonTimeBuilder.build_time(hh="24", mm="00.1")
+
+ with self.assertRaises(MidnightBoundsError):
+ PythonTimeBuilder.build_time(hh="24", mm="01")
+
+ with self.assertRaises(MidnightBoundsError):
+ PythonTimeBuilder.build_time(hh="24.1")
+
+ def test_range_check_duration(self):
+ with self.assertRaises(YearOutOfBoundsError):
+ PythonTimeBuilder.build_duration(
+ PnY=str((datetime.timedelta.max.days // 365) + 1)
+ )
+
+ with self.assertRaises(MonthOutOfBoundsError):
+ PythonTimeBuilder.build_duration(
+ PnM=str((datetime.timedelta.max.days // 30) + 1)
+ )
+
+ with self.assertRaises(DayOutOfBoundsError):
+ PythonTimeBuilder.build_duration(PnD=str(datetime.timedelta.max.days + 1))
+
+ with self.assertRaises(WeekOutOfBoundsError):
+ PythonTimeBuilder.build_duration(
+ PnW=str((datetime.timedelta.max.days // 7) + 1)
+ )
+
+ with self.assertRaises(HoursOutOfBoundsError):
+ PythonTimeBuilder.build_duration(
+ TnH=str((datetime.timedelta.max.days * 24) + 24)
+ )
+
+ with self.assertRaises(MinutesOutOfBoundsError):
+ PythonTimeBuilder.build_duration(
+ TnM=str((datetime.timedelta.max.days * 24 * 60) + 24 * 60)
+ )
+
+ with self.assertRaises(SecondsOutOfBoundsError):
+ PythonTimeBuilder.build_duration(
+ TnS=str((datetime.timedelta.max.days * 24 * 60 * 60) + 24 * 60 * 60)
+ )
+
+ # Split max range across all parts
+ maxpart = datetime.timedelta.max.days // 7
+
+ with self.assertRaises(DayOutOfBoundsError):
+ PythonTimeBuilder.build_duration(
+ PnY=str((maxpart // 365) + 1),
+ PnM=str((maxpart // 30) + 1),
+ PnD=str((maxpart + 1)),
+ PnW=str((maxpart // 7) + 1),
+ TnH=str((maxpart * 24) + 1),
+ TnM=str((maxpart * 24 * 60) + 1),
+ TnS=str((maxpart * 24 * 60 * 60) + 1),
+ )
+
+ def test_range_check_interval(self):
+ with self.assertRaises(YearOutOfBoundsError):
+ PythonTimeBuilder.build_interval(
+ start=DateTuple("0007", None, None, None, None, None),
+ duration=DurationTuple(
+ None, None, None, str(datetime.timedelta.max.days), None, None, None
+ ),
+ )
+
+ with self.assertRaises(YearOutOfBoundsError):
+ PythonTimeBuilder.build_interval(
+ start=DatetimeTuple(
+ DateTuple("0007", None, None, None, None, None),
+ TimeTuple("1", None, None, None),
+ ),
+ duration=DurationTuple(
+ str(datetime.timedelta.max.days // 365),
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ ),
+ )
+
+ with self.assertRaises(YearOutOfBoundsError):
+ PythonTimeBuilder.build_interval(
+ end=DateTuple("0001", None, None, None, None, None),
+ duration=DurationTuple("3", None, None, None, None, None, None),
+ )
+
+ with self.assertRaises(YearOutOfBoundsError):
+ PythonTimeBuilder.build_interval(
+ end=DatetimeTuple(
+ DateTuple("0001", None, None, None, None, None),
+ TimeTuple("1", None, None, None),
+ ),
+ duration=DurationTuple("2", None, None, None, None, None, None),
+ )
+
+ def test_build_week_date(self):
+ weekdate = PythonTimeBuilder._build_week_date(2009, 1)
+ self.assertEqual(weekdate, datetime.date(year=2008, month=12, day=29))
+
+ weekdate = PythonTimeBuilder._build_week_date(2009, 53, isoday=7)
+ self.assertEqual(weekdate, datetime.date(year=2010, month=1, day=3))
+
+ def test_build_ordinal_date(self):
+ ordinaldate = PythonTimeBuilder._build_ordinal_date(1981, 95)
+ self.assertEqual(ordinaldate, datetime.date(year=1981, month=4, day=5))
+
+ def test_iso_year_start(self):
+ yearstart = PythonTimeBuilder._iso_year_start(2004)
+ self.assertEqual(yearstart, datetime.date(year=2003, month=12, day=29))
+
+ yearstart = PythonTimeBuilder._iso_year_start(2010)
+ self.assertEqual(yearstart, datetime.date(year=2010, month=1, day=4))
+
+ yearstart = PythonTimeBuilder._iso_year_start(2009)
+ self.assertEqual(yearstart, datetime.date(year=2008, month=12, day=29))
+
+ def test_date_generator(self):
+ startdate = datetime.date(year=2018, month=8, day=29)
+ timedelta = datetime.timedelta(days=1)
+ iterations = 10
+
+ generator = PythonTimeBuilder._date_generator(startdate, timedelta, iterations)
+
+ results = list(generator)
+
+ for dateindex in compat.range(0, 10):
+ self.assertEqual(
+ results[dateindex],
+ datetime.date(year=2018, month=8, day=29)
+ + dateindex * datetime.timedelta(days=1),
+ )
+
+ def test_date_generator_unbounded(self):
+ startdate = datetime.date(year=2018, month=8, day=29)
+ timedelta = datetime.timedelta(days=5)
+
+ generator = PythonTimeBuilder._date_generator_unbounded(startdate, timedelta)
+
+ # Check the first 10 results
+ for dateindex in compat.range(0, 10):
+ self.assertEqual(
+ next(generator),
+ datetime.date(year=2018, month=8, day=29)
+ + dateindex * datetime.timedelta(days=5),
+ )
+
+ def test_distribute_microseconds(self):
+ self.assertEqual(PythonTimeBuilder._distribute_microseconds(1, (), ()), (1,))
+ self.assertEqual(
+ PythonTimeBuilder._distribute_microseconds(11, (0,), (10,)), (1, 1)
+ )
+ self.assertEqual(
+ PythonTimeBuilder._distribute_microseconds(211, (0, 0), (100, 10)),
+ (2, 1, 1),
+ )
+
+ self.assertEqual(PythonTimeBuilder._distribute_microseconds(1, (), ()), (1,))
+ self.assertEqual(
+ PythonTimeBuilder._distribute_microseconds(11, (5,), (10,)), (6, 1)
+ )
+ self.assertEqual(
+ PythonTimeBuilder._distribute_microseconds(211, (10, 5), (100, 10)),
+ (12, 6, 1),
+ )
diff --git a/libs/aniso8601/compat.py b/libs/aniso8601/compat.py
new file mode 100644
index 000000000..25af5794a
--- /dev/null
+++ b/libs/aniso8601/compat.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+import sys
+
+PY2 = sys.version_info[0] == 2
+
+if PY2: # pragma: no cover
+ range = xrange # pylint: disable=undefined-variable
+else:
+ range = range
+
+
+def is_string(tocheck):
+ # pylint: disable=undefined-variable
+ if PY2: # pragma: no cover
+ return isinstance(tocheck, str) or isinstance(tocheck, unicode)
+
+ return isinstance(tocheck, str)
diff --git a/libs/aniso8601/date.py b/libs/aniso8601/date.py
new file mode 100644
index 000000000..ea0cf9c59
--- /dev/null
+++ b/libs/aniso8601/date.py
@@ -0,0 +1,161 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+from aniso8601.builders import TupleBuilder
+from aniso8601.builders.python import PythonTimeBuilder
+from aniso8601.compat import is_string
+from aniso8601.exceptions import ISOFormatError
+from aniso8601.resolution import DateResolution
+
+
+def get_date_resolution(isodatestr):
+ # Valid string formats are:
+ #
+ # Y[YYY]
+ # YYYY-MM-DD
+ # YYYYMMDD
+ # YYYY-MM
+ # YYYY-Www
+ # YYYYWww
+ # YYYY-Www-D
+ # YYYYWwwD
+ # YYYY-DDD
+ # YYYYDDD
+ isodatetuple = parse_date(isodatestr, builder=TupleBuilder)
+
+ if isodatetuple.DDD is not None:
+ # YYYY-DDD
+ # YYYYDDD
+ return DateResolution.Ordinal
+
+ if isodatetuple.D is not None:
+ # YYYY-Www-D
+ # YYYYWwwD
+ return DateResolution.Weekday
+
+ if isodatetuple.Www is not None:
+ # YYYY-Www
+ # YYYYWww
+ return DateResolution.Week
+
+ if isodatetuple.DD is not None:
+ # YYYY-MM-DD
+ # YYYYMMDD
+ return DateResolution.Day
+
+ if isodatetuple.MM is not None:
+ # YYYY-MM
+ return DateResolution.Month
+
+ # Y[YYY]
+ return DateResolution.Year
+
+
+def parse_date(isodatestr, builder=PythonTimeBuilder):
+ # Given a string in any ISO 8601 date format, return a datetime.date
+ # object that corresponds to the given date. Valid string formats are:
+ #
+ # Y[YYY]
+ # YYYY-MM-DD
+ # YYYYMMDD
+ # YYYY-MM
+ # YYYY-Www
+ # YYYYWww
+ # YYYY-Www-D
+ # YYYYWwwD
+ # YYYY-DDD
+ # YYYYDDD
+ if is_string(isodatestr) is False:
+ raise ValueError("Date must be string.")
+
+ if isodatestr.startswith("+") or isodatestr.startswith("-"):
+ raise NotImplementedError(
+ "ISO 8601 extended year representation " "not supported."
+ )
+
+ if len(isodatestr) == 0 or isodatestr.count("-") > 2:
+ raise ISOFormatError('"{0}" is not a valid ISO 8601 date.'.format(isodatestr))
+ yearstr = None
+ monthstr = None
+ daystr = None
+ weekstr = None
+ weekdaystr = None
+ ordinaldaystr = None
+
+ if len(isodatestr) <= 4:
+ # Y[YYY]
+ yearstr = isodatestr
+ elif "W" in isodatestr:
+ if len(isodatestr) == 10:
+ # YYYY-Www-D
+ yearstr = isodatestr[0:4]
+ weekstr = isodatestr[6:8]
+ weekdaystr = isodatestr[9]
+ elif len(isodatestr) == 8:
+ if "-" in isodatestr:
+ # YYYY-Www
+ yearstr = isodatestr[0:4]
+ weekstr = isodatestr[6:]
+ else:
+ # YYYYWwwD
+ yearstr = isodatestr[0:4]
+ weekstr = isodatestr[5:7]
+ weekdaystr = isodatestr[7]
+ elif len(isodatestr) == 7:
+ # YYYYWww
+ yearstr = isodatestr[0:4]
+ weekstr = isodatestr[5:]
+ elif len(isodatestr) == 7:
+ if "-" in isodatestr:
+ # YYYY-MM
+ yearstr = isodatestr[0:4]
+ monthstr = isodatestr[5:]
+ else:
+ # YYYYDDD
+ yearstr = isodatestr[0:4]
+ ordinaldaystr = isodatestr[4:]
+ elif len(isodatestr) == 8:
+ if "-" in isodatestr:
+ # YYYY-DDD
+ yearstr = isodatestr[0:4]
+ ordinaldaystr = isodatestr[5:]
+ else:
+ # YYYYMMDD
+ yearstr = isodatestr[0:4]
+ monthstr = isodatestr[4:6]
+ daystr = isodatestr[6:]
+ elif len(isodatestr) == 10:
+ # YYYY-MM-DD
+ yearstr = isodatestr[0:4]
+ monthstr = isodatestr[5:7]
+ daystr = isodatestr[8:]
+ else:
+ raise ISOFormatError('"{0}" is not a valid ISO 8601 date.'.format(isodatestr))
+
+ hascomponent = False
+
+ for componentstr in [yearstr, monthstr, daystr, weekstr, weekdaystr, ordinaldaystr]:
+ if componentstr is not None:
+ hascomponent = True
+
+ if componentstr.isdigit() is False:
+ raise ISOFormatError(
+ '"{0}" is not a valid ISO 8601 date.'.format(isodatestr)
+ )
+
+ if hascomponent is False:
+ raise ISOFormatError('"{0}" is not a valid ISO 8601 date.'.format(isodatestr))
+
+ return builder.build_date(
+ YYYY=yearstr,
+ MM=monthstr,
+ DD=daystr,
+ Www=weekstr,
+ D=weekdaystr,
+ DDD=ordinaldaystr,
+ )
diff --git a/libs/aniso8601/decimalfraction.py b/libs/aniso8601/decimalfraction.py
new file mode 100644
index 000000000..3086ee794
--- /dev/null
+++ b/libs/aniso8601/decimalfraction.py
@@ -0,0 +1,12 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+
+def normalize(value):
+ """Returns the string with decimal separators normalized."""
+ return value.replace(",", ".")
diff --git a/libs/aniso8601/duration.py b/libs/aniso8601/duration.py
new file mode 100644
index 000000000..cdc0f8f7f
--- /dev/null
+++ b/libs/aniso8601/duration.py
@@ -0,0 +1,291 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+from aniso8601 import compat
+from aniso8601.builders import TupleBuilder
+from aniso8601.builders.python import PythonTimeBuilder
+from aniso8601.date import parse_date
+from aniso8601.decimalfraction import normalize
+from aniso8601.exceptions import ISOFormatError
+from aniso8601.resolution import DurationResolution
+from aniso8601.time import parse_time
+
+
+def get_duration_resolution(isodurationstr):
+ # Valid string formats are:
+ #
+ # PnYnMnDTnHnMnS (or any reduced precision equivalent)
+ # PnW
+ # P<date>T<time>
+ isodurationtuple = parse_duration(isodurationstr, builder=TupleBuilder)
+
+ if isodurationtuple.TnS is not None:
+ return DurationResolution.Seconds
+
+ if isodurationtuple.TnM is not None:
+ return DurationResolution.Minutes
+
+ if isodurationtuple.TnH is not None:
+ return DurationResolution.Hours
+
+ if isodurationtuple.PnD is not None:
+ return DurationResolution.Days
+
+ if isodurationtuple.PnW is not None:
+ return DurationResolution.Weeks
+
+ if isodurationtuple.PnM is not None:
+ return DurationResolution.Months
+
+ return DurationResolution.Years
+
+
+def parse_duration(isodurationstr, builder=PythonTimeBuilder):
+    # Given a string representing an ISO 8601 duration, return
+    # a duration built by the given builder. Valid formats are:
+ #
+ # PnYnMnDTnHnMnS (or any reduced precision equivalent)
+ # PnW
+ # P<date>T<time>
+
+ if compat.is_string(isodurationstr) is False:
+ raise ValueError("Duration must be string.")
+
+ if len(isodurationstr) == 0:
+ raise ISOFormatError(
+ '"{0}" is not a valid ISO 8601 duration.'.format(isodurationstr)
+ )
+
+ if isodurationstr[0] != "P":
+ raise ISOFormatError("ISO 8601 duration must start with a P.")
+
+ # If Y, M, D, H, S, or W are in the string,
+ # assume it is a specified duration
+ if _has_any_component(isodurationstr, ["Y", "M", "D", "H", "S", "W"]) is True:
+ parseresult = _parse_duration_prescribed(isodurationstr)
+ return builder.build_duration(**parseresult)
+
+ if isodurationstr.find("T") != -1:
+ parseresult = _parse_duration_combined(isodurationstr)
+ return builder.build_duration(**parseresult)
+
+ raise ISOFormatError(
+ '"{0}" is not a valid ISO 8601 duration.'.format(isodurationstr)
+ )
+
+
+def _parse_duration_prescribed(isodurationstr):
+ # durationstr can be of the form PnYnMnDTnHnMnS or PnW
+
+ # Make sure the end character is valid
+ # https://bitbucket.org/nielsenb/aniso8601/issues/9/durations-with-trailing-garbage-are-parsed
+ if isodurationstr[-1] not in ["Y", "M", "D", "H", "S", "W"]:
+ raise ISOFormatError("ISO 8601 duration must end with a valid " "character.")
+
+ # Make sure only the lowest order element has decimal precision
+ durationstr = normalize(isodurationstr)
+
+ if durationstr.count(".") > 1:
+ raise ISOFormatError(
+ "ISO 8601 allows only lowest order element to " "have a decimal fraction."
+ )
+
+ seperatoridx = durationstr.find(".")
+
+ if seperatoridx != -1:
+ remaining = durationstr[seperatoridx + 1 : -1]
+
+    # There should only ever be 1 letter after a decimal; if there is more
+    # than one, the string is invalid
+ if remaining.isdigit() is False:
+ raise ISOFormatError(
+ "ISO 8601 duration must end with " "a single valid character."
+ )
+
+ # Do not allow W in combination with other designators
+ # https://bitbucket.org/nielsenb/aniso8601/issues/2/week-designators-should-not-be-combinable
+ if (
+ durationstr.find("W") != -1
+ and _has_any_component(durationstr, ["Y", "M", "D", "H", "S"]) is True
+ ):
+ raise ISOFormatError(
+ "ISO 8601 week designators may not be combined "
+ "with other time designators."
+ )
+
+ # Parse the elements of the duration
+ if durationstr.find("T") == -1:
+ return _parse_duration_prescribed_notime(durationstr)
+
+ return _parse_duration_prescribed_time(durationstr)
+
+
+def _parse_duration_prescribed_notime(isodurationstr):
+ # durationstr can be of the form PnYnMnD or PnW
+
+ durationstr = normalize(isodurationstr)
+
+ yearstr = None
+ monthstr = None
+ daystr = None
+ weekstr = None
+
+ weekidx = durationstr.find("W")
+ yearidx = durationstr.find("Y")
+ monthidx = durationstr.find("M")
+ dayidx = durationstr.find("D")
+
+ if weekidx != -1:
+ weekstr = durationstr[1:-1]
+ elif yearidx != -1 and monthidx != -1 and dayidx != -1:
+ yearstr = durationstr[1:yearidx]
+ monthstr = durationstr[yearidx + 1 : monthidx]
+ daystr = durationstr[monthidx + 1 : -1]
+ elif yearidx != -1 and monthidx != -1:
+ yearstr = durationstr[1:yearidx]
+ monthstr = durationstr[yearidx + 1 : monthidx]
+ elif yearidx != -1 and dayidx != -1:
+ yearstr = durationstr[1:yearidx]
+ daystr = durationstr[yearidx + 1 : dayidx]
+ elif monthidx != -1 and dayidx != -1:
+ monthstr = durationstr[1:monthidx]
+ daystr = durationstr[monthidx + 1 : -1]
+ elif yearidx != -1:
+ yearstr = durationstr[1:-1]
+ elif monthidx != -1:
+ monthstr = durationstr[1:-1]
+ elif dayidx != -1:
+ daystr = durationstr[1:-1]
+ else:
+ raise ISOFormatError(
+ '"{0}" is not a valid ISO 8601 duration.'.format(isodurationstr)
+ )
+
+ for componentstr in [yearstr, monthstr, daystr, weekstr]:
+ if componentstr is not None:
+ if "." in componentstr:
+ intstr, fractionalstr = componentstr.split(".", 1)
+
+ if intstr.isdigit() is False:
+ raise ISOFormatError(
+ '"{0}" is not a valid ISO 8601 duration.'.format(isodurationstr)
+ )
+ else:
+ if componentstr.isdigit() is False:
+ raise ISOFormatError(
+ '"{0}" is not a valid ISO 8601 duration.'.format(isodurationstr)
+ )
+
+ return {"PnY": yearstr, "PnM": monthstr, "PnW": weekstr, "PnD": daystr}
+
+
+def _parse_duration_prescribed_time(isodurationstr):
+ # durationstr can be of the form PnYnMnDTnHnMnS
+
+ timeidx = isodurationstr.find("T")
+
+ datestr = isodurationstr[:timeidx]
+ timestr = normalize(isodurationstr[timeidx + 1 :])
+
+ hourstr = None
+ minutestr = None
+ secondstr = None
+
+ houridx = timestr.find("H")
+ minuteidx = timestr.find("M")
+ secondidx = timestr.find("S")
+
+ if houridx != -1 and minuteidx != -1 and secondidx != -1:
+ hourstr = timestr[0:houridx]
+ minutestr = timestr[houridx + 1 : minuteidx]
+ secondstr = timestr[minuteidx + 1 : -1]
+ elif houridx != -1 and minuteidx != -1:
+ hourstr = timestr[0:houridx]
+ minutestr = timestr[houridx + 1 : minuteidx]
+ elif houridx != -1 and secondidx != -1:
+ hourstr = timestr[0:houridx]
+ secondstr = timestr[houridx + 1 : -1]
+ elif minuteidx != -1 and secondidx != -1:
+ minutestr = timestr[0:minuteidx]
+ secondstr = timestr[minuteidx + 1 : -1]
+ elif houridx != -1:
+ hourstr = timestr[0:-1]
+ elif minuteidx != -1:
+ minutestr = timestr[0:-1]
+ elif secondidx != -1:
+ secondstr = timestr[0:-1]
+ else:
+ raise ISOFormatError(
+ '"{0}" is not a valid ISO 8601 duration.'.format(isodurationstr)
+ )
+
+ for componentstr in [hourstr, minutestr, secondstr]:
+ if componentstr is not None:
+ if "." in componentstr:
+ intstr, fractionalstr = componentstr.split(".", 1)
+
+ if intstr.isdigit() is False:
+ raise ISOFormatError(
+ '"{0}" is not a valid ISO 8601 duration.'.format(isodurationstr)
+ )
+ else:
+ if componentstr.isdigit() is False:
+ raise ISOFormatError(
+ '"{0}" is not a valid ISO 8601 duration.'.format(isodurationstr)
+ )
+
+ # Parse any date components
+ durationdict = {"PnY": None, "PnM": None, "PnW": None, "PnD": None}
+
+ if len(datestr) > 1:
+ durationdict = _parse_duration_prescribed_notime(datestr)
+
+ durationdict.update({"TnH": hourstr, "TnM": minutestr, "TnS": secondstr})
+
+ return durationdict
+
+
+def _parse_duration_combined(durationstr):
+ # Period of the form P<date>T<time>
+
+ # Split the string in to its component parts
+ datepart, timepart = durationstr[1:].split("T", 1) # We skip the 'P'
+
+ datevalue = parse_date(datepart, builder=TupleBuilder)
+ timevalue = parse_time(timepart, builder=TupleBuilder)
+
+ return {
+ "PnY": datevalue.YYYY,
+ "PnM": datevalue.MM,
+ "PnD": datevalue.DD,
+ "TnH": timevalue.hh,
+ "TnM": timevalue.mm,
+ "TnS": timevalue.ss,
+ }
+
+
+def _has_any_component(durationstr, components):
+ # Given a duration string, and a list of components, returns True
+ # if any of the listed components are present, False otherwise.
+ #
+ # For instance:
+ # durationstr = 'P1Y'
+ # components = ['Y', 'M']
+ #
+ # returns True
+ #
+ # durationstr = 'P1Y'
+ # components = ['M', 'D']
+ #
+ # returns False
+
+ for component in components:
+ if durationstr.find(component) != -1:
+ return True
+
+ return False
diff --git a/libs/aniso8601/exceptions.py b/libs/aniso8601/exceptions.py
new file mode 100644
index 000000000..c14b44421
--- /dev/null
+++ b/libs/aniso8601/exceptions.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+
+class ISOFormatError(ValueError):
+ """Raised when ISO 8601 string fails a format check."""
+
+
+class RangeCheckError(ValueError):
+ """Parent type of range check errors."""
+
+
+class YearOutOfBoundsError(RangeCheckError):
+ """Raised when year exceeds limits."""
+
+
+class MonthOutOfBoundsError(RangeCheckError):
+ """Raised when month is outside of 1..12."""
+
+
+class WeekOutOfBoundsError(RangeCheckError):
+ """Raised when week exceeds a year."""
+
+
+class DayOutOfBoundsError(RangeCheckError):
+ """Raised when day is outside of 1..365, 1..366 for leap year."""
+
+
+class HoursOutOfBoundsError(RangeCheckError):
+ """Raise when parsed hours are greater than 24."""
+
+
+class MinutesOutOfBoundsError(RangeCheckError):
+    """Raised when parsed minutes are greater than 60."""
+
+
+class SecondsOutOfBoundsError(RangeCheckError):
+ """Raise when parsed seconds are greater than 60."""
+
+
+class MidnightBoundsError(RangeCheckError):
+ """Raise when parsed time has an hour of 24 but is not midnight."""
+
+
+class LeapSecondError(RangeCheckError):
+ """Raised when attempting to parse a leap second"""
diff --git a/libs/aniso8601/interval.py b/libs/aniso8601/interval.py
new file mode 100644
index 000000000..cd0184c6b
--- /dev/null
+++ b/libs/aniso8601/interval.py
@@ -0,0 +1,350 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+from aniso8601.builders import DatetimeTuple, DateTuple, TupleBuilder
+from aniso8601.builders.python import PythonTimeBuilder
+from aniso8601.compat import is_string
+from aniso8601.date import parse_date
+from aniso8601.duration import parse_duration
+from aniso8601.exceptions import ISOFormatError
+from aniso8601.resolution import IntervalResolution
+from aniso8601.time import parse_datetime, parse_time
+
+
+def get_interval_resolution(
+ isointervalstr, intervaldelimiter="/", datetimedelimiter="T"
+):
+ isointervaltuple = parse_interval(
+ isointervalstr,
+ intervaldelimiter=intervaldelimiter,
+ datetimedelimiter=datetimedelimiter,
+ builder=TupleBuilder,
+ )
+
+ return _get_interval_resolution(isointervaltuple)
+
+
+def get_repeating_interval_resolution(
+ isointervalstr, intervaldelimiter="/", datetimedelimiter="T"
+):
+ repeatingintervaltuple = parse_repeating_interval(
+ isointervalstr,
+ intervaldelimiter=intervaldelimiter,
+ datetimedelimiter=datetimedelimiter,
+ builder=TupleBuilder,
+ )
+
+ return _get_interval_resolution(repeatingintervaltuple.interval)
+
+
+def _get_interval_resolution(intervaltuple):
+ if intervaltuple.start is not None and intervaltuple.end is not None:
+ return max(
+ _get_interval_component_resolution(intervaltuple.start),
+ _get_interval_component_resolution(intervaltuple.end),
+ )
+
+ if intervaltuple.start is not None and intervaltuple.duration is not None:
+ return max(
+ _get_interval_component_resolution(intervaltuple.start),
+ _get_interval_component_resolution(intervaltuple.duration),
+ )
+
+ return max(
+ _get_interval_component_resolution(intervaltuple.end),
+ _get_interval_component_resolution(intervaltuple.duration),
+ )
+
+
+def _get_interval_component_resolution(componenttuple):
+ if type(componenttuple) is DateTuple:
+ if componenttuple.DDD is not None:
+ # YYYY-DDD
+ # YYYYDDD
+ return IntervalResolution.Ordinal
+
+ if componenttuple.D is not None:
+ # YYYY-Www-D
+ # YYYYWwwD
+ return IntervalResolution.Weekday
+
+ if componenttuple.Www is not None:
+ # YYYY-Www
+ # YYYYWww
+ return IntervalResolution.Week
+
+ if componenttuple.DD is not None:
+ # YYYY-MM-DD
+ # YYYYMMDD
+ return IntervalResolution.Day
+
+ if componenttuple.MM is not None:
+ # YYYY-MM
+ return IntervalResolution.Month
+
+ # Y[YYY]
+ return IntervalResolution.Year
+ elif type(componenttuple) is DatetimeTuple:
+ # Datetime
+ if componenttuple.time.ss is not None:
+ return IntervalResolution.Seconds
+
+ if componenttuple.time.mm is not None:
+ return IntervalResolution.Minutes
+
+ return IntervalResolution.Hours
+
+ # Duration
+ if componenttuple.TnS is not None:
+ return IntervalResolution.Seconds
+
+ if componenttuple.TnM is not None:
+ return IntervalResolution.Minutes
+
+ if componenttuple.TnH is not None:
+ return IntervalResolution.Hours
+
+ if componenttuple.PnD is not None:
+ return IntervalResolution.Day
+
+ if componenttuple.PnW is not None:
+ return IntervalResolution.Week
+
+ if componenttuple.PnM is not None:
+ return IntervalResolution.Month
+
+ return IntervalResolution.Year
+
+
+def parse_interval(
+ isointervalstr,
+ intervaldelimiter="/",
+ datetimedelimiter="T",
+ builder=PythonTimeBuilder,
+):
+ # Given a string representing an ISO 8601 interval, return an
+ # interval built by the given builder. Valid formats are:
+ #
+ # <start>/<end>
+ # <start>/<duration>
+ # <duration>/<end>
+ #
+ # The <start> and <end> values can represent dates, or datetimes,
+ # not times.
+ #
+ # The format:
+ #
+ # <duration>
+ #
+ # Is expressly not supported as there is no way to provide the additional
+ # required context.
+
+ if is_string(isointervalstr) is False:
+ raise ValueError("Interval must be string.")
+
+ if len(isointervalstr) == 0:
+ raise ISOFormatError("Interval string is empty.")
+
+ if isointervalstr[0] == "R":
+ raise ISOFormatError(
+ "ISO 8601 repeating intervals must be parsed "
+ "with parse_repeating_interval."
+ )
+
+ intervaldelimitercount = isointervalstr.count(intervaldelimiter)
+
+ if intervaldelimitercount == 0:
+ raise ISOFormatError(
+ 'Interval delimiter "{0}" is not in interval '
+ 'string "{1}".'.format(intervaldelimiter, isointervalstr)
+ )
+
+ if intervaldelimitercount > 1:
+ raise ISOFormatError(
+ "{0} is not a valid ISO 8601 interval".format(isointervalstr)
+ )
+
+ return _parse_interval(
+ isointervalstr, builder, intervaldelimiter, datetimedelimiter
+ )
+
+
+def parse_repeating_interval(
+ isointervalstr,
+ intervaldelimiter="/",
+ datetimedelimiter="T",
+ builder=PythonTimeBuilder,
+):
+    # Given a string representing an ISO 8601 repeating interval, return an
+ # interval built by the given builder. Valid formats are:
+ #
+ # Rnn/<interval>
+ # R/<interval>
+
+ if not isinstance(isointervalstr, str):
+ raise ValueError("Interval must be string.")
+
+ if len(isointervalstr) == 0:
+ raise ISOFormatError("Repeating interval string is empty.")
+
+ if isointervalstr[0] != "R":
+ raise ISOFormatError("ISO 8601 repeating interval must start " "with an R.")
+
+ if intervaldelimiter not in isointervalstr:
+ raise ISOFormatError(
+ 'Interval delimiter "{0}" is not in interval '
+ 'string "{1}".'.format(intervaldelimiter, isointervalstr)
+ )
+
+ # Parse the number of iterations
+ iterationpart, intervalpart = isointervalstr.split(intervaldelimiter, 1)
+
+ if len(iterationpart) > 1:
+ R = False
+ Rnn = iterationpart[1:]
+ else:
+ R = True
+ Rnn = None
+
+ interval = _parse_interval(
+ intervalpart, TupleBuilder, intervaldelimiter, datetimedelimiter
+ )
+
+ return builder.build_repeating_interval(R=R, Rnn=Rnn, interval=interval)
+
+
+def _parse_interval(
+ isointervalstr, builder, intervaldelimiter="/", datetimedelimiter="T"
+):
+    # Returns a tuple containing the start of the interval, the end of the
+    # interval, and/or the interval duration
+
+ firstpart, secondpart = isointervalstr.split(intervaldelimiter)
+
+ if len(firstpart) == 0 or len(secondpart) == 0:
+ raise ISOFormatError(
+ "{0} is not a valid ISO 8601 interval".format(isointervalstr)
+ )
+
+ if firstpart[0] == "P":
+ # <duration>/<end>
+ # Notice that these are not returned 'in order' (earlier to later), this
+ # is to maintain consistency with parsing <start>/<end> durations, as
+ # well as making repeating interval code cleaner. Users who desire
+ # durations to be in order can use the 'sorted' operator.
+ duration = parse_duration(firstpart, builder=TupleBuilder)
+
+ # We need to figure out if <end> is a date, or a datetime
+ if secondpart.find(datetimedelimiter) != -1:
+ # <end> is a datetime
+ endtuple = parse_datetime(
+ secondpart, delimiter=datetimedelimiter, builder=TupleBuilder
+ )
+ else:
+ endtuple = parse_date(secondpart, builder=TupleBuilder)
+
+ return builder.build_interval(end=endtuple, duration=duration)
+ elif secondpart[0] == "P":
+ # <start>/<duration>
+ # We need to figure out if <start> is a date, or a datetime
+ duration = parse_duration(secondpart, builder=TupleBuilder)
+
+ if firstpart.find(datetimedelimiter) != -1:
+ # <start> is a datetime
+ starttuple = parse_datetime(
+ firstpart, delimiter=datetimedelimiter, builder=TupleBuilder
+ )
+ else:
+ # <start> must just be a date
+ starttuple = parse_date(firstpart, builder=TupleBuilder)
+
+ return builder.build_interval(start=starttuple, duration=duration)
+
+ # <start>/<end>
+ if firstpart.find(datetimedelimiter) != -1:
+ # Both parts are datetimes
+ starttuple = parse_datetime(
+ firstpart, delimiter=datetimedelimiter, builder=TupleBuilder
+ )
+ else:
+ starttuple = parse_date(firstpart, builder=TupleBuilder)
+
+ endtuple = _parse_interval_end(secondpart, starttuple, datetimedelimiter)
+
+ return builder.build_interval(start=starttuple, end=endtuple)
+
+
+def _parse_interval_end(endstr, starttuple, datetimedelimiter):
+ datestr = None
+ timestr = None
+
+ monthstr = None
+ daystr = None
+
+ concise = False
+
+ if type(starttuple) is DateTuple:
+ startdatetuple = starttuple
+ else:
+ # Start is a datetime
+ startdatetuple = starttuple.date
+
+ if datetimedelimiter in endstr:
+ datestr, timestr = endstr.split(datetimedelimiter, 1)
+ elif ":" in endstr:
+ timestr = endstr
+ else:
+ datestr = endstr
+
+ if timestr is not None:
+ endtimetuple = parse_time(timestr, builder=TupleBuilder)
+
+ # End is just a time
+ if datestr is None:
+ return endtimetuple
+
+ # Handle backwards concise representation
+ if datestr.count("-") == 1:
+ monthstr, daystr = datestr.split("-")
+ concise = True
+ elif len(datestr) <= 2:
+ daystr = datestr
+ concise = True
+ elif len(datestr) <= 4:
+ monthstr = datestr[0:2]
+ daystr = datestr[2:]
+ concise = True
+
+ if concise is True:
+ concisedatestr = startdatetuple.YYYY
+
+ # Separators required because concise elements may be missing digits
+ if monthstr is not None:
+ concisedatestr += "-" + monthstr
+ elif startdatetuple.MM is not None:
+ concisedatestr += "-" + startdatetuple.MM
+
+ concisedatestr += "-" + daystr
+
+ enddatetuple = parse_date(concisedatestr, builder=TupleBuilder)
+
+ # Clear unsupplied components
+ if monthstr is None:
+ enddatetuple = TupleBuilder.build_date(DD=enddatetuple.DD)
+ else:
+ # Year not provided
+ enddatetuple = TupleBuilder.build_date(
+ MM=enddatetuple.MM, DD=enddatetuple.DD
+ )
+ else:
+ enddatetuple = parse_date(datestr, builder=TupleBuilder)
+
+ if timestr is None:
+ return enddatetuple
+
+ return TupleBuilder.build_datetime(enddatetuple, endtimetuple)
diff --git a/libs/aniso8601/resolution.py b/libs/aniso8601/resolution.py
new file mode 100644
index 000000000..eca2d4e7e
--- /dev/null
+++ b/libs/aniso8601/resolution.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+from aniso8601 import compat
+
+
+class DateResolution(object):
+ Year, Month, Week, Weekday, Day, Ordinal = list(compat.range(6))
+
+
+class DurationResolution(object):
+ Years, Months, Weeks, Days, Hours, Minutes, Seconds = list(compat.range(7))
+
+
+class IntervalResolution(object):
+ Year, Month, Week, Weekday, Day, Ordinal, Hours, Minutes, Seconds = list(
+ compat.range(9)
+ )
+
+
+class TimeResolution(object):
+ Seconds, Minutes, Hours = list(compat.range(3))
diff --git a/libs/aniso8601/tests/__init__.py b/libs/aniso8601/tests/__init__.py
new file mode 100644
index 000000000..1a94e017a
--- /dev/null
+++ b/libs/aniso8601/tests/__init__.py
@@ -0,0 +1,7 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
diff --git a/libs/aniso8601/tests/compat.py b/libs/aniso8601/tests/compat.py
new file mode 100644
index 000000000..6c5266589
--- /dev/null
+++ b/libs/aniso8601/tests/compat.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+import sys
+
+PY2 = sys.version_info[0] == 2
+
+if PY2:
+ import mock # pylint: disable=import-error
+else:
+ from unittest import mock
diff --git a/libs/aniso8601/tests/test_compat.py b/libs/aniso8601/tests/test_compat.py
new file mode 100644
index 000000000..2f7988331
--- /dev/null
+++ b/libs/aniso8601/tests/test_compat.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+import unittest
+
+from aniso8601.compat import PY2, is_string
+
+
+class TestCompatFunctions(unittest.TestCase):
+ def test_is_string(self):
+ self.assertTrue(is_string("asdf"))
+ self.assertTrue(is_string(""))
+
+ # pylint: disable=undefined-variable
+ if PY2 is True:
+ self.assertTrue(is_string(unicode("asdf")))
+
+ self.assertFalse(is_string(None))
+ self.assertFalse(is_string(123))
+ self.assertFalse(is_string(4.56))
+ self.assertFalse(is_string([]))
+ self.assertFalse(is_string({}))
diff --git a/libs/aniso8601/tests/test_date.py b/libs/aniso8601/tests/test_date.py
new file mode 100644
index 000000000..54e3076eb
--- /dev/null
+++ b/libs/aniso8601/tests/test_date.py
@@ -0,0 +1,303 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+import unittest
+
+import aniso8601
+from aniso8601.date import get_date_resolution, parse_date
+from aniso8601.exceptions import DayOutOfBoundsError, ISOFormatError
+from aniso8601.resolution import DateResolution
+from aniso8601.tests.compat import mock
+
+
+class TestDateResolutionFunctions(unittest.TestCase):
+ def test_get_date_resolution_year(self):
+ self.assertEqual(get_date_resolution("2013"), DateResolution.Year)
+ self.assertEqual(get_date_resolution("0001"), DateResolution.Year)
+ self.assertEqual(get_date_resolution("19"), DateResolution.Year)
+
+ def test_get_date_resolution_month(self):
+ self.assertEqual(get_date_resolution("1981-04"), DateResolution.Month)
+
+ def test_get_date_resolution_week(self):
+ self.assertEqual(get_date_resolution("2004-W53"), DateResolution.Week)
+ self.assertEqual(get_date_resolution("2009-W01"), DateResolution.Week)
+ self.assertEqual(get_date_resolution("2004W53"), DateResolution.Week)
+
+ def test_get_date_resolution_day(self):
+ self.assertEqual(get_date_resolution("2004-04-11"), DateResolution.Day)
+ self.assertEqual(get_date_resolution("20090121"), DateResolution.Day)
+
+ def test_get_date_resolution_year_weekday(self):
+ self.assertEqual(get_date_resolution("2004-W53-6"), DateResolution.Weekday)
+ self.assertEqual(get_date_resolution("2004W536"), DateResolution.Weekday)
+
+ def test_get_date_resolution_year_ordinal(self):
+ self.assertEqual(get_date_resolution("1981-095"), DateResolution.Ordinal)
+ self.assertEqual(get_date_resolution("1981095"), DateResolution.Ordinal)
+
+ def test_get_date_resolution_badtype(self):
+ testtuples = (None, 1, False, 1.234)
+
+ for testtuple in testtuples:
+ with self.assertRaises(ValueError):
+ get_date_resolution(testtuple)
+
+ def test_get_date_resolution_extended_year(self):
+ testtuples = ("+2000", "+30000")
+
+ for testtuple in testtuples:
+ with self.assertRaises(NotImplementedError):
+ get_date_resolution(testtuple)
+
+ def test_get_date_resolution_badweek(self):
+ testtuples = ("2004-W1", "2004W1")
+
+ for testtuple in testtuples:
+ with self.assertRaises(ISOFormatError):
+ get_date_resolution(testtuple)
+
+ def test_get_date_resolution_badweekday(self):
+ testtuples = ("2004-W53-67", "2004W5367")
+
+ for testtuple in testtuples:
+ with self.assertRaises(ISOFormatError):
+ get_date_resolution(testtuple)
+
+ def test_get_date_resolution_badstr(self):
+ testtuples = (
+ "W53",
+ "2004-W",
+ "2014-01-230",
+ "2014-012-23",
+ "201-01-23",
+ "201401230",
+ "201401",
+ "",
+ )
+
+ for testtuple in testtuples:
+ with self.assertRaises(ISOFormatError):
+ get_date_resolution(testtuple)
+
+
+class TestDateParserFunctions(unittest.TestCase):
+ def test_parse_date(self):
+ testtuples = (
+ (
+ "2013",
+ {
+ "YYYY": "2013",
+ "MM": None,
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": None,
+ },
+ ),
+ (
+ "0001",
+ {
+ "YYYY": "0001",
+ "MM": None,
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": None,
+ },
+ ),
+ (
+ "19",
+ {
+ "YYYY": "19",
+ "MM": None,
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": None,
+ },
+ ),
+ (
+ "1981-04-05",
+ {
+ "YYYY": "1981",
+ "MM": "04",
+ "DD": "05",
+ "Www": None,
+ "D": None,
+ "DDD": None,
+ },
+ ),
+ (
+ "19810405",
+ {
+ "YYYY": "1981",
+ "MM": "04",
+ "DD": "05",
+ "Www": None,
+ "D": None,
+ "DDD": None,
+ },
+ ),
+ (
+ "1981-04",
+ {
+ "YYYY": "1981",
+ "MM": "04",
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": None,
+ },
+ ),
+ (
+ "2004-W53",
+ {
+ "YYYY": "2004",
+ "MM": None,
+ "DD": None,
+ "Www": "53",
+ "D": None,
+ "DDD": None,
+ },
+ ),
+ (
+ "2009-W01",
+ {
+ "YYYY": "2009",
+ "MM": None,
+ "DD": None,
+ "Www": "01",
+ "D": None,
+ "DDD": None,
+ },
+ ),
+ (
+ "2004-W53-6",
+ {
+ "YYYY": "2004",
+ "MM": None,
+ "DD": None,
+ "Www": "53",
+ "D": "6",
+ "DDD": None,
+ },
+ ),
+ (
+ "2004W53",
+ {
+ "YYYY": "2004",
+ "MM": None,
+ "DD": None,
+ "Www": "53",
+ "D": None,
+ "DDD": None,
+ },
+ ),
+ (
+ "2004W536",
+ {
+ "YYYY": "2004",
+ "MM": None,
+ "DD": None,
+ "Www": "53",
+ "D": "6",
+ "DDD": None,
+ },
+ ),
+ (
+ "1981-095",
+ {
+ "YYYY": "1981",
+ "MM": None,
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": "095",
+ },
+ ),
+ (
+ "1981095",
+ {
+ "YYYY": "1981",
+ "MM": None,
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": "095",
+ },
+ ),
+ (
+ "1980366",
+ {
+ "YYYY": "1980",
+ "MM": None,
+ "DD": None,
+ "Www": None,
+ "D": None,
+ "DDD": "366",
+ },
+ ),
+ )
+
+ for testtuple in testtuples:
+ with mock.patch.object(
+ aniso8601.date.PythonTimeBuilder, "build_date"
+ ) as mockBuildDate:
+ mockBuildDate.return_value = testtuple[1]
+
+ result = parse_date(testtuple[0])
+
+ self.assertEqual(result, testtuple[1])
+ mockBuildDate.assert_called_once_with(**testtuple[1])
+
+ def test_parse_date_badtype(self):
+ testtuples = (None, 1, False, 1.234)
+
+ for testtuple in testtuples:
+ with self.assertRaises(ValueError):
+ parse_date(testtuple, builder=None)
+
+ def test_parse_date_badstr(self):
+ testtuples = (
+ "W53",
+ "2004-W",
+ "2014-01-230",
+ "2014-012-23",
+ "201-01-23",
+ "201401230",
+ "201401",
+ "9999 W53",
+ "20.50230",
+ "198104",
+ "bad",
+ "",
+ )
+
+ for testtuple in testtuples:
+ with self.assertRaises(ISOFormatError):
+ parse_date(testtuple, builder=None)
+
+ def test_parse_date_mockbuilder(self):
+ mockBuilder = mock.Mock()
+
+ expectedargs = {
+ "YYYY": "1981",
+ "MM": "04",
+ "DD": "05",
+ "Www": None,
+ "D": None,
+ "DDD": None,
+ }
+
+ mockBuilder.build_date.return_value = expectedargs
+
+ result = parse_date("1981-04-05", builder=mockBuilder)
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.build_date.assert_called_once_with(**expectedargs)
diff --git a/libs/aniso8601/tests/test_decimalfraction.py b/libs/aniso8601/tests/test_decimalfraction.py
new file mode 100644
index 000000000..dc52f2406
--- /dev/null
+++ b/libs/aniso8601/tests/test_decimalfraction.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+import unittest
+
+from aniso8601.decimalfraction import normalize
+
+
+class TestDecimalFractionFunctions(unittest.TestCase):
+ def test_normalize(self):
+ self.assertEqual(normalize(""), "")
+ self.assertEqual(normalize("12.34"), "12.34")
+ self.assertEqual(normalize("123,45"), "123.45")
+ self.assertEqual(normalize("123,45,67"), "123.45.67")
diff --git a/libs/aniso8601/tests/test_duration.py b/libs/aniso8601/tests/test_duration.py
new file mode 100644
index 000000000..0d7d40a4d
--- /dev/null
+++ b/libs/aniso8601/tests/test_duration.py
@@ -0,0 +1,1402 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+import unittest
+
+import aniso8601
+from aniso8601.duration import (
+ _has_any_component,
+ _parse_duration_combined,
+ _parse_duration_prescribed,
+ _parse_duration_prescribed_notime,
+ _parse_duration_prescribed_time,
+ get_duration_resolution,
+ parse_duration,
+)
+from aniso8601.exceptions import ISOFormatError
+from aniso8601.resolution import DurationResolution
+from aniso8601.tests.compat import mock
+
+
+class TestDurationParserFunctions(unittest.TestCase):
+ def test_get_duration_resolution_years(self):
+ self.assertEqual(get_duration_resolution("P1Y"), DurationResolution.Years)
+ self.assertEqual(get_duration_resolution("P1,5Y"), DurationResolution.Years)
+ self.assertEqual(get_duration_resolution("P1.5Y"), DurationResolution.Years)
+
+ def test_get_duration_resolution_months(self):
+ self.assertEqual(get_duration_resolution("P1Y2M"), DurationResolution.Months)
+ self.assertEqual(get_duration_resolution("P1M"), DurationResolution.Months)
+ self.assertEqual(get_duration_resolution("P1,5M"), DurationResolution.Months)
+ self.assertEqual(get_duration_resolution("P1.5M"), DurationResolution.Months)
+
+ def test_get_duration_resolution_weeks(self):
+ self.assertEqual(get_duration_resolution("P1W"), DurationResolution.Weeks)
+ self.assertEqual(get_duration_resolution("P1,5W"), DurationResolution.Weeks)
+ self.assertEqual(get_duration_resolution("P1.5W"), DurationResolution.Weeks)
+
+ def test_get_duration_resolution_days(self):
+ self.assertEqual(get_duration_resolution("P1Y2M3D"), DurationResolution.Days)
+ self.assertEqual(get_duration_resolution("P1Y2M3,5D"), DurationResolution.Days)
+ self.assertEqual(get_duration_resolution("P1Y2M3.5D"), DurationResolution.Days)
+ self.assertEqual(get_duration_resolution("P1D"), DurationResolution.Days)
+ self.assertEqual(get_duration_resolution("P1,5D"), DurationResolution.Days)
+ self.assertEqual(get_duration_resolution("P1.5D"), DurationResolution.Days)
+
+ def test_get_duration_resolution_hours(self):
+ self.assertEqual(
+ get_duration_resolution("P1Y2M3DT4H"), DurationResolution.Hours
+ )
+ self.assertEqual(get_duration_resolution("PT4H"), DurationResolution.Hours)
+
+ def test_get_duration_resolution_minutes(self):
+ self.assertEqual(
+ get_duration_resolution("P1Y2M3DT4H5M"), DurationResolution.Minutes
+ )
+ self.assertEqual(get_duration_resolution("PT4H5M"), DurationResolution.Minutes)
+
+ def test_get_duration_resolution_seconds(self):
+ self.assertEqual(
+ get_duration_resolution("P1Y2M3DT4H54M6S"), DurationResolution.Seconds
+ )
+ self.assertEqual(
+ get_duration_resolution("P1Y2M3DT4H54M6,5S"), DurationResolution.Seconds
+ )
+ self.assertEqual(
+ get_duration_resolution("P1Y2M3DT4H54M6.5S"), DurationResolution.Seconds
+ )
+ self.assertEqual(
+ get_duration_resolution("PT4H54M6,5S"), DurationResolution.Seconds
+ )
+ self.assertEqual(
+ get_duration_resolution("PT4H54M6.5S"), DurationResolution.Seconds
+ )
+ self.assertEqual(
+ get_duration_resolution("PT0.0000001S"), DurationResolution.Seconds
+ )
+ self.assertEqual(
+ get_duration_resolution("PT2.0000048S"), DurationResolution.Seconds
+ )
+ self.assertEqual(
+ get_duration_resolution("P0003-06-04T12:30:05"), DurationResolution.Seconds
+ )
+ self.assertEqual(
+ get_duration_resolution("P0003-06-04T12:30:05.5"),
+ DurationResolution.Seconds,
+ )
+ self.assertEqual(
+ get_duration_resolution("P0001-02-03T14:43:59.9999997"),
+ DurationResolution.Seconds,
+ )
+
+ def test_parse_duration(self):
+ testtuples = (
+ (
+ "P1Y2M3DT4H54M6S",
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6",
+ },
+ ),
+ (
+ "P1Y2M3DT4H54M6,5S",
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6.5",
+ },
+ ),
+ (
+ "P1Y2M3DT4H54M6.5S",
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6.5",
+ },
+ ),
+ (
+ "P1YT4H",
+ {
+ "PnY": "1",
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": None,
+ "TnS": None,
+ },
+ ),
+ (
+ "P1YT54M",
+ {
+ "PnY": "1",
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": "54",
+ "TnS": None,
+ },
+ ),
+ (
+ "P1YT6S",
+ {
+ "PnY": "1",
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": None,
+ "TnS": "6",
+ },
+ ),
+ (
+ "P1YT4H54M",
+ {
+ "PnY": "1",
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": None,
+ },
+ ),
+ (
+ "P1YT4H6S",
+ {
+ "PnY": "1",
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": None,
+ "TnS": "6",
+ },
+ ),
+ (
+ "P1YT54M6S",
+ {
+ "PnY": "1",
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": "54",
+ "TnS": "6",
+ },
+ ),
+ (
+ "P1YT4H54M6S",
+ {
+ "PnY": "1",
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6",
+ },
+ ),
+ (
+ "P2MT4H",
+ {
+ "PnY": None,
+ "PnM": "2",
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": None,
+ "TnS": None,
+ },
+ ),
+ (
+ "P2MT54M",
+ {
+ "PnY": None,
+ "PnM": "2",
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": "54",
+ "TnS": None,
+ },
+ ),
+ (
+ "P2MT6S",
+ {
+ "PnY": None,
+ "PnM": "2",
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": None,
+ "TnS": "6",
+ },
+ ),
+ (
+ "P2MT4H54M",
+ {
+ "PnY": None,
+ "PnM": "2",
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": None,
+ },
+ ),
+ (
+ "P2MT4H6S",
+ {
+ "PnY": None,
+ "PnM": "2",
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": None,
+ "TnS": "6",
+ },
+ ),
+ (
+ "P2MT54M6S",
+ {
+ "PnY": None,
+ "PnM": "2",
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": "54",
+ "TnS": "6",
+ },
+ ),
+ (
+ "P2MT4H54M6S",
+ {
+ "PnY": None,
+ "PnM": "2",
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6",
+ },
+ ),
+ (
+ "P3DT4H",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": None,
+ "TnS": None,
+ },
+ ),
+ (
+ "P3DT54M",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": "3",
+ "TnH": None,
+ "TnM": "54",
+ "TnS": None,
+ },
+ ),
+ (
+ "P3DT6S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": "3",
+ "TnH": None,
+ "TnM": None,
+ "TnS": "6",
+ },
+ ),
+ (
+ "P3DT4H54M",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": None,
+ },
+ ),
+ (
+ "P3DT4H6S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": None,
+ "TnS": "6",
+ },
+ ),
+ (
+ "P3DT54M6S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": "3",
+ "TnH": None,
+ "TnM": "54",
+ "TnS": "6",
+ },
+ ),
+ (
+ "P3DT4H54M6S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6",
+ },
+ ),
+ (
+ "P1Y2MT4H",
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": None,
+ "TnS": None,
+ },
+ ),
+ (
+ "P1Y2MT54M",
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": "54",
+ "TnS": None,
+ },
+ ),
+ (
+ "P1Y2MT6S",
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": None,
+ "TnS": "6",
+ },
+ ),
+ (
+ "P1Y2MT4H54M",
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": None,
+ },
+ ),
+ (
+ "P1Y2MT4H6S",
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": None,
+ "TnS": "6",
+ },
+ ),
+ (
+ "P1Y2MT54M6S",
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": "54",
+ "TnS": "6",
+ },
+ ),
+ (
+ "P1Y2MT4H54M6S",
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6",
+ },
+ ),
+ (
+ "P1Y3DT4H",
+ {
+ "PnY": "1",
+ "PnM": None,
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": None,
+ "TnS": None,
+ },
+ ),
+ (
+ "P1Y3DT54M",
+ {
+ "PnY": "1",
+ "PnM": None,
+ "PnW": None,
+ "PnD": "3",
+ "TnH": None,
+ "TnM": "54",
+ "TnS": None,
+ },
+ ),
+ (
+ "P1Y3DT6S",
+ {
+ "PnY": "1",
+ "PnM": None,
+ "PnW": None,
+ "PnD": "3",
+ "TnH": None,
+ "TnM": None,
+ "TnS": "6",
+ },
+ ),
+ (
+ "P1Y3DT4H54M",
+ {
+ "PnY": "1",
+ "PnM": None,
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": None,
+ },
+ ),
+ (
+ "P1Y3DT4H6S",
+ {
+ "PnY": "1",
+ "PnM": None,
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": None,
+ "TnS": "6",
+ },
+ ),
+ (
+ "P1Y3DT54M6S",
+ {
+ "PnY": "1",
+ "PnM": None,
+ "PnW": None,
+ "PnD": "3",
+ "TnH": None,
+ "TnM": "54",
+ "TnS": "6",
+ },
+ ),
+ (
+ "P1Y3DT4H54M6S",
+ {
+ "PnY": "1",
+ "PnM": None,
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6",
+ },
+ ),
+ (
+ "P2M3DT4H",
+ {
+ "PnY": None,
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": None,
+ "TnS": None,
+ },
+ ),
+ (
+ "P2M3DT54M",
+ {
+ "PnY": None,
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": None,
+ "TnM": "54",
+ "TnS": None,
+ },
+ ),
+ (
+ "P2M3DT6S",
+ {
+ "PnY": None,
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": None,
+ "TnM": None,
+ "TnS": "6",
+ },
+ ),
+ (
+ "P2M3DT4H54M",
+ {
+ "PnY": None,
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": None,
+ },
+ ),
+ (
+ "P2M3DT4H6S",
+ {
+ "PnY": None,
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": None,
+ "TnS": "6",
+ },
+ ),
+ (
+ "P2M3DT54M6S",
+ {
+ "PnY": None,
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": None,
+ "TnM": "54",
+ "TnS": "6",
+ },
+ ),
+ (
+ "P2M3DT4H54M6S",
+ {
+ "PnY": None,
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6",
+ },
+ ),
+ (
+ "PT4H54M6,5S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6.5",
+ },
+ ),
+ (
+ "PT4H54M6.5S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6.5",
+ },
+ ),
+ (
+ "PT4H",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": None,
+ "TnS": None,
+ },
+ ),
+ (
+ "PT5M",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": "5",
+ "TnS": None,
+ },
+ ),
+ (
+ "PT6S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": None,
+ "TnS": "6",
+ },
+ ),
+ (
+ "PT1H2M",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": "1",
+ "TnM": "2",
+ "TnS": None,
+ },
+ ),
+ (
+ "PT3H4S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": "3",
+ "TnM": None,
+ "TnS": "4",
+ },
+ ),
+ (
+ "PT5M6S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": "5",
+ "TnS": "6",
+ },
+ ),
+ (
+ "PT0.0000001S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": None,
+ "TnS": "0.0000001",
+ },
+ ),
+ (
+ "PT2.0000048S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": None,
+ "TnS": "2.0000048",
+ },
+ ),
+ ("P1Y", {"PnY": "1", "PnM": None, "PnW": None, "PnD": None}),
+ ("P1,5Y", {"PnY": "1.5", "PnM": None, "PnW": None, "PnD": None}),
+ ("P1.5Y", {"PnY": "1.5", "PnM": None, "PnW": None, "PnD": None}),
+ ("P1M", {"PnY": None, "PnM": "1", "PnW": None, "PnD": None}),
+ ("P1,5M", {"PnY": None, "PnM": "1.5", "PnW": None, "PnD": None}),
+ ("P1.5M", {"PnY": None, "PnM": "1.5", "PnW": None, "PnD": None}),
+ ("P1W", {"PnY": None, "PnM": None, "PnW": "1", "PnD": None}),
+ ("P1,5W", {"PnY": None, "PnM": None, "PnW": "1.5", "PnD": None}),
+ ("P1.5W", {"PnY": None, "PnM": None, "PnW": "1.5", "PnD": None}),
+ ("P1D", {"PnY": None, "PnM": None, "PnW": None, "PnD": "1"}),
+ ("P1,5D", {"PnY": None, "PnM": None, "PnW": None, "PnD": "1.5"}),
+ ("P1.5D", {"PnY": None, "PnM": None, "PnW": None, "PnD": "1.5"}),
+ ("P1Y2M3D", {"PnY": "1", "PnM": "2", "PnW": None, "PnD": "3"}),
+ ("P1Y2M3,5D", {"PnY": "1", "PnM": "2", "PnW": None, "PnD": "3.5"}),
+ ("P1Y2M3.5D", {"PnY": "1", "PnM": "2", "PnW": None, "PnD": "3.5"}),
+ ("P1Y2M", {"PnY": "1", "PnM": "2", "PnW": None, "PnD": None}),
+ (
+ "P0003-06-04T12:30:05",
+ {
+ "PnY": "0003",
+ "PnM": "06",
+ "PnD": "04",
+ "TnH": "12",
+ "TnM": "30",
+ "TnS": "05",
+ },
+ ),
+ (
+ "P0003-06-04T12:30:05.5",
+ {
+ "PnY": "0003",
+ "PnM": "06",
+ "PnD": "04",
+ "TnH": "12",
+ "TnM": "30",
+ "TnS": "05.5",
+ },
+ ),
+ (
+ "P0001-02-03T14:43:59.9999997",
+ {
+ "PnY": "0001",
+ "PnM": "02",
+ "PnD": "03",
+ "TnH": "14",
+ "TnM": "43",
+ "TnS": "59.9999997",
+ },
+ ),
+ )
+
+ for testtuple in testtuples:
+ with mock.patch.object(
+ aniso8601.duration.PythonTimeBuilder, "build_duration"
+ ) as mockBuildDuration:
+ mockBuildDuration.return_value = testtuple[1]
+
+ result = parse_duration(testtuple[0])
+
+ self.assertEqual(result, testtuple[1])
+ mockBuildDuration.assert_called_once_with(**testtuple[1])
+
+ def test_parse_duration_mockbuilder(self):
+ mockBuilder = mock.Mock()
+
+ expectedargs = {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6",
+ }
+
+ mockBuilder.build_duration.return_value = expectedargs
+
+ result = parse_duration("P1Y2M3DT4H54M6S", builder=mockBuilder)
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.build_duration.assert_called_once_with(**expectedargs)
+
+ def test_parse_duration_badtype(self):
+ testtuples = (None, 1, False, 1.234)
+
+ for testtuple in testtuples:
+ with self.assertRaises(ValueError):
+ parse_duration(testtuple, builder=None)
+
+ def test_parse_duration_nop(self):
+ with self.assertRaises(ISOFormatError):
+ # Duration must start with a P
+ parse_duration("1Y2M3DT4H54M6S", builder=None)
+
+ def test_parse_duration_weekcombination(self):
+ # Week designator cannot be combined with other time designators
+ # https://bitbucket.org/nielsenb/aniso8601/issues/2/week-designators-should-not-be-combinable
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P1Y2W", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P1M2W", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P2W3D", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P1Y2W3D", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P1M2W3D", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P1Y1M2W3D", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P7WT4H", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P7WT54M", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P7WT6S", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P7WT4H54M", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P7WT4H6S", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P7WT54M6S", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P7WT4H54M6S", builder=None)
+
+ def test_parse_duration_negative(self):
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P-1Y", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P-2M", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P-3D", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P-T4H", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P-T54M", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P-T6S", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P-7W", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P-1Y2M3DT4H54M6S", builder=None)
+
+ def test_parse_duration_outoforder(self):
+ # Ensure durations are required to be in the correct order
+ # https://bitbucket.org/nielsenb/aniso8601/issues/7/durations-with-time-components-before-t
+ # https://bitbucket.org/nielsenb/aniso8601/issues/8/durations-with-components-in-wrong-order
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P1S", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P1D1S", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P1H1M", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("1Y2M3D1SPT1M", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P1Y2M3D2MT1S", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P2M3D1ST1Y1M", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P1Y2M2MT3D1S", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("P1D1Y1M", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_duration("PT1S1H", builder=None)
+
+ def test_parse_duration_badstr(self):
+ testtuples = (
+ "PPPPPPPPPPPPPPPPPPPPPPPPPPPP",
+ "PTT",
+ "PX7DDDTX8888UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUU"
+ "UUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUU8888888888888888H$H",
+ "P1Y2M3X.4D",
+ "P1Y2M3.4XD",
+ "P1Y2M3DT4H5M6XS",
+ "PT4H5M6X.2S",
+ "bad",
+ "",
+ )
+
+ for testtuple in testtuples:
+ with self.assertRaises(ISOFormatError):
+ parse_duration(testtuple, builder=None)
+
+ def test_parse_duration_prescribed(self):
+ testtuples = (
+ (
+ "P1Y2M3DT4H54M6S",
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6",
+ },
+ ),
+ (
+ "P1Y2M3DT4H54M6,5S",
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6.5",
+ },
+ ),
+ (
+ "P1Y2M3DT4H54M6.5S",
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6.5",
+ },
+ ),
+ (
+ "PT4H54M6,5S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6.5",
+ },
+ ),
+ (
+ "PT4H54M6.5S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6.5",
+ },
+ ),
+            ("P1Y2M3D", {"PnY": "1", "PnM": "2", "PnW": None, "PnD": "3"}),
+ ("P1Y2M3,5D", {"PnY": "1", "PnM": "2", "PnW": None, "PnD": "3.5"}),
+ ("P1Y2M3.5D", {"PnY": "1", "PnM": "2", "PnW": None, "PnD": "3.5"}),
+ ("P1Y2M", {"PnY": "1", "PnM": "2", "PnW": None, "PnD": None}),
+ ("P1Y", {"PnY": "1", "PnM": None, "PnW": None, "PnD": None}),
+ ("P1,5Y", {"PnY": "1.5", "PnM": None, "PnW": None, "PnD": None}),
+ ("P1.5Y", {"PnY": "1.5", "PnM": None, "PnW": None, "PnD": None}),
+ ("P1M", {"PnY": None, "PnM": "1", "PnW": None, "PnD": None}),
+ ("P1,5M", {"PnY": None, "PnM": "1.5", "PnW": None, "PnD": None}),
+ ("P1.5M", {"PnY": None, "PnM": "1.5", "PnW": None, "PnD": None}),
+ ("P1W", {"PnY": None, "PnM": None, "PnW": "1", "PnD": None}),
+ ("P1,5W", {"PnY": None, "PnM": None, "PnW": "1.5", "PnD": None}),
+ ("P1.5W", {"PnY": None, "PnM": None, "PnW": "1.5", "PnD": None}),
+ ("P1D", {"PnY": None, "PnM": None, "PnW": None, "PnD": "1"}),
+ ("P1,5D", {"PnY": None, "PnM": None, "PnW": None, "PnD": "1.5"}),
+ ("P1.5D", {"PnY": None, "PnM": None, "PnW": None, "PnD": "1.5"}),
+ )
+
+ for testtuple in testtuples:
+ result = _parse_duration_prescribed(testtuple[0])
+
+ self.assertEqual(result, testtuple[1])
+
+ def test_parse_duration_prescribed_negative(self):
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed("P-1Y")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed("P-2M")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed("P-3D")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed("P-4W")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed("P-1Y2M3D")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed("P-T1H")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed("P-T2M")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed("P-T3S")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed("P-1Y2M3DT4H54M6S")
+
+ def test_parse_duration_prescribed_multiplefractions(self):
+ with self.assertRaises(ISOFormatError):
+ # Multiple fractions are not allowed
+ _parse_duration_prescribed("P1Y2M3DT4H5.1234M6.1234S")
+
+ def test_parse_duration_prescribed_middlefraction(self):
+ with self.assertRaises(ISOFormatError):
+ # Fraction only allowed on final component
+ _parse_duration_prescribed("P1Y2M3DT4H5.1234M6S")
+
+ def test_parse_duration_prescribed_suffixgarbage(self):
+ # Don't allow garbage after the duration
+ # https://bitbucket.org/nielsenb/aniso8601/issues/9/durations-with-trailing-garbage-are-parsed
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed("P1Dasdfasdf")
+
+ def test_parse_duration_prescribed_notime(self):
+ testtuples = (
+ ("P1Y2M3D", {"PnY": "1", "PnM": "2", "PnW": None, "PnD": "3"}),
+ ("P1Y2M3,5D", {"PnY": "1", "PnM": "2", "PnW": None, "PnD": "3.5"}),
+ ("P1Y2M3.5D", {"PnY": "1", "PnM": "2", "PnW": None, "PnD": "3.5"}),
+ ("P1Y3D", {"PnY": "1", "PnM": None, "PnW": None, "PnD": "3"}),
+ ("P1Y2M", {"PnY": "1", "PnM": "2", "PnW": None, "PnD": None}),
+ ("P2M3D", {"PnY": None, "PnM": "2", "PnW": None, "PnD": "3"}),
+ ("P1Y", {"PnY": "1", "PnM": None, "PnW": None, "PnD": None}),
+ ("P1,5Y", {"PnY": "1.5", "PnM": None, "PnW": None, "PnD": None}),
+ ("P1.5Y", {"PnY": "1.5", "PnM": None, "PnW": None, "PnD": None}),
+ ("P1M", {"PnY": None, "PnM": "1", "PnW": None, "PnD": None}),
+ ("P1,5M", {"PnY": None, "PnM": "1.5", "PnW": None, "PnD": None}),
+ ("P1.5M", {"PnY": None, "PnM": "1.5", "PnW": None, "PnD": None}),
+ ("P1W", {"PnY": None, "PnM": None, "PnW": "1", "PnD": None}),
+ ("P1,5W", {"PnY": None, "PnM": None, "PnW": "1.5", "PnD": None}),
+ ("P1.5W", {"PnY": None, "PnM": None, "PnW": "1.5", "PnD": None}),
+ ("P1D", {"PnY": None, "PnM": None, "PnW": None, "PnD": "1"}),
+ ("P1,5D", {"PnY": None, "PnM": None, "PnW": None, "PnD": "1.5"}),
+ ("P1.5D", {"PnY": None, "PnM": None, "PnW": None, "PnD": "1.5"}),
+ )
+
+ for testtuple in testtuples:
+ result = _parse_duration_prescribed_notime(testtuple[0])
+
+ self.assertEqual(result, testtuple[1])
+
+ def test_parse_duration_prescribed_notime_timepart(self):
+ # Ensure no time part is allowed
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_notime("P1S")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_notime("P1D1S")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_notime("P1H1M")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_notime("P1Y2M3D4H")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_notime("P1Y2M3D4H5S")
+
+ def test_parse_duration_prescribed_notime_outoforder(self):
+ # Ensure durations are required to be in the correct order
+ # https://bitbucket.org/nielsenb/aniso8601/issues/8/durations-with-components-in-wrong-order
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_notime("P1H1M")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_notime("P1D1Y1M")
+
+ def test_parse_duration_prescribed_notime_badstr(self):
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_notime("P1S")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_notime("P1D1S")
+
+ def test_parse_duration_prescribed_time(self):
+ testtuples = (
+ (
+ "P1Y2M3DT4H54M6S",
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6",
+ },
+ ),
+ (
+ "P1Y2M3DT4H54M6,5S",
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6.5",
+ },
+ ),
+ (
+ "P1Y2M3DT4H54M6.5S",
+ {
+ "PnY": "1",
+ "PnM": "2",
+ "PnW": None,
+ "PnD": "3",
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6.5",
+ },
+ ),
+ (
+ "PT4H54M6,5S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6.5",
+ },
+ ),
+ (
+ "PT4H54M6.5S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": "54",
+ "TnS": "6.5",
+ },
+ ),
+ (
+ "PT4H",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": "4",
+ "TnM": None,
+ "TnS": None,
+ },
+ ),
+ (
+ "PT5M",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": "5",
+ "TnS": None,
+ },
+ ),
+ (
+ "PT6S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": None,
+ "TnS": "6",
+ },
+ ),
+ (
+ "PT1H2M",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": "1",
+ "TnM": "2",
+ "TnS": None,
+ },
+ ),
+ (
+ "PT3H4S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": "3",
+ "TnM": None,
+ "TnS": "4",
+ },
+ ),
+ (
+ "PT5M6S",
+ {
+ "PnY": None,
+ "PnM": None,
+ "PnW": None,
+ "PnD": None,
+ "TnH": None,
+ "TnM": "5",
+ "TnS": "6",
+ },
+ ),
+ )
+
+ for testtuple in testtuples:
+ result = _parse_duration_prescribed_time(testtuple[0])
+
+ self.assertEqual(result, testtuple[1])
+
+ def test_parse_duration_prescribed_time_timeindate(self):
+ # Don't allow time components in date half
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_time("P1Y2M3D4HT54M6S")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_time("P1Y2M3D6ST4H54M")
+
+ def test_parse_duration_prescribed_time_dateintime(self):
+ # Don't allow date components in time half
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_time("P2M3DT1Y4H54M6S")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_time("P1Y2MT3D4H54M6S")
+
+ def test_parse_duration_prescribed_time_outoforder(self):
+ # Ensure durations are required to be in the correct order
+ # https://bitbucket.org/nielsenb/aniso8601/issues/7/durations-with-time-components-before-t
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_time("1Y2M3D1SPT1M")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_time("P1Y2M3D2MT1S")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_time("P2M3D1ST1Y1M")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_time("P1Y2M2MT3D1S")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_time("PT1S1H")
+
+ def test_parse_duration_prescribed_time_badstr(self):
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_time("P1Y")
+
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_prescribed_time("P1Y1M")
+
+ def test_parse_duration_combined(self):
+ testtuples = (
+ (
+ "P0003-06-04T12:30:05",
+ {
+ "PnY": "0003",
+ "PnM": "06",
+ "PnD": "04",
+ "TnH": "12",
+ "TnM": "30",
+ "TnS": "05",
+ },
+ ),
+ (
+ "P0003-06-04T12:30:05,5",
+ {
+ "PnY": "0003",
+ "PnM": "06",
+ "PnD": "04",
+ "TnH": "12",
+ "TnM": "30",
+ "TnS": "05.5",
+ },
+ ),
+ (
+ "P0003-06-04T12:30:05.5",
+ {
+ "PnY": "0003",
+ "PnM": "06",
+ "PnD": "04",
+ "TnH": "12",
+ "TnM": "30",
+ "TnS": "05.5",
+ },
+ ),
+ (
+ "P0001-02-03T14:43:59.9999997",
+ {
+ "PnY": "0001",
+ "PnM": "02",
+ "PnD": "03",
+ "TnH": "14",
+ "TnM": "43",
+ "TnS": "59.9999997",
+ },
+ ),
+ )
+
+ for testtuple in testtuples:
+ result = _parse_duration_combined(testtuple[0])
+
+ self.assertEqual(result, testtuple[1])
+
+ def test_parse_duration_combined_suffixgarbage(self):
+ # Don't allow garbage after the duration
+ # https://bitbucket.org/nielsenb/aniso8601/issues/9/durations-with-trailing-garbage-are-parsed
+ with self.assertRaises(ISOFormatError):
+ _parse_duration_combined("P0003-06-04T12:30:05.5asdfasdf")
+
+ def test_has_any_component(self):
+ self.assertTrue(_has_any_component("P1Y", ["Y", "M"]))
+ self.assertFalse(_has_any_component("P1Y", ["M", "D"]))
diff --git a/libs/aniso8601/tests/test_init.py b/libs/aniso8601/tests/test_init.py
new file mode 100644
index 000000000..d5604c6b9
--- /dev/null
+++ b/libs/aniso8601/tests/test_init.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+import unittest
+
+import aniso8601
+
+
+class TestInitFunctions(unittest.TestCase):
+ def test_import(self):
+ # Verify the function mappings
+ self.assertEqual(aniso8601.parse_datetime, aniso8601.time.parse_datetime)
+ self.assertEqual(aniso8601.parse_time, aniso8601.time.parse_time)
+ self.assertEqual(
+ aniso8601.get_time_resolution, aniso8601.time.get_time_resolution
+ )
+ self.assertEqual(
+ aniso8601.get_datetime_resolution, aniso8601.time.get_datetime_resolution
+ )
+
+ self.assertEqual(aniso8601.parse_date, aniso8601.date.parse_date)
+ self.assertEqual(
+ aniso8601.get_date_resolution, aniso8601.date.get_date_resolution
+ )
+
+ self.assertEqual(aniso8601.parse_duration, aniso8601.duration.parse_duration)
+ self.assertEqual(
+ aniso8601.get_duration_resolution,
+ aniso8601.duration.get_duration_resolution,
+ )
+
+ self.assertEqual(aniso8601.parse_interval, aniso8601.interval.parse_interval)
+ self.assertEqual(
+ aniso8601.parse_repeating_interval,
+ aniso8601.interval.parse_repeating_interval,
+ )
+ self.assertEqual(
+ aniso8601.get_interval_resolution,
+ aniso8601.interval.get_interval_resolution,
+ )
+ self.assertEqual(
+ aniso8601.get_repeating_interval_resolution,
+ aniso8601.interval.get_repeating_interval_resolution,
+ )
diff --git a/libs/aniso8601/tests/test_interval.py b/libs/aniso8601/tests/test_interval.py
new file mode 100644
index 000000000..f01d15112
--- /dev/null
+++ b/libs/aniso8601/tests/test_interval.py
@@ -0,0 +1,1675 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+import unittest
+
+import aniso8601
+from aniso8601.builders import (
+ DatetimeTuple,
+ DateTuple,
+ DurationTuple,
+ IntervalTuple,
+ TimeTuple,
+ TimezoneTuple,
+)
+from aniso8601.exceptions import ISOFormatError
+from aniso8601.interval import (
+ _get_interval_component_resolution,
+ _get_interval_resolution,
+ _parse_interval,
+ _parse_interval_end,
+ get_interval_resolution,
+ get_repeating_interval_resolution,
+ parse_interval,
+ parse_repeating_interval,
+)
+from aniso8601.resolution import IntervalResolution
+from aniso8601.tests.compat import mock
+
+
+class TestIntervalParser_UtilityFunctions(unittest.TestCase):
+ def test_get_interval_resolution(self):
+ self.assertEqual(
+ _get_interval_resolution(
+ IntervalTuple(
+ start=DateTuple(
+ YYYY="2001", MM="02", DD="03", Www=None, D=None, DDD=None
+ ),
+ end=DatetimeTuple(
+ DateTuple(
+ YYYY="2001", MM="02", DD="03", Www=None, D=None, DDD=None
+ ),
+ TimeTuple(hh="04", mm="05", ss="06", tz=None),
+ ),
+ duration=None,
+ )
+ ),
+ IntervalResolution.Seconds,
+ )
+ self.assertEqual(
+ _get_interval_resolution(
+ IntervalTuple(
+ start=DatetimeTuple(
+ DateTuple(
+ YYYY="2001", MM="02", DD="03", Www=None, D=None, DDD=None
+ ),
+ TimeTuple(hh="04", mm="05", ss="06", tz=None),
+ ),
+ end=DateTuple(
+ YYYY="2001", MM="02", DD="03", Www=None, D=None, DDD=None
+ ),
+ duration=None,
+ )
+ ),
+ IntervalResolution.Seconds,
+ )
+
+ self.assertEqual(
+ _get_interval_resolution(
+ IntervalTuple(
+ start=DateTuple(
+ YYYY="2001", MM="02", DD="03", Www=None, D=None, DDD=None
+ ),
+ end=None,
+ duration=DurationTuple(
+ PnY="1", PnM="2", PnW=None, PnD="3", TnH="4", TnM="5", TnS="6"
+ ),
+ )
+ ),
+ IntervalResolution.Seconds,
+ )
+ self.assertEqual(
+ _get_interval_resolution(
+ IntervalTuple(
+ start=DatetimeTuple(
+ DateTuple(
+ YYYY="2001", MM="02", DD="03", Www=None, D=None, DDD=None
+ ),
+ TimeTuple(hh="04", mm="05", ss="06", tz=None),
+ ),
+ end=None,
+ duration=DurationTuple(
+ PnY="1",
+ PnM="2",
+ PnW=None,
+ PnD="3",
+ TnH=None,
+ TnM=None,
+ TnS=None,
+ ),
+ )
+ ),
+ IntervalResolution.Seconds,
+ )
+
+ self.assertEqual(
+ _get_interval_resolution(
+ IntervalTuple(
+ start=None,
+ end=DateTuple(
+ YYYY="2001", MM="02", DD="03", Www=None, D=None, DDD=None
+ ),
+ duration=DurationTuple(
+ PnY="1", PnM="2", PnW=None, PnD="3", TnH="4", TnM="5", TnS="6"
+ ),
+ )
+ ),
+ IntervalResolution.Seconds,
+ )
+ self.assertEqual(
+ _get_interval_resolution(
+ IntervalTuple(
+ start=None,
+ end=DatetimeTuple(
+ DateTuple(
+ YYYY="2001", MM="02", DD="03", Www=None, D=None, DDD=None
+ ),
+ TimeTuple(hh="04", mm="05", ss="06", tz=None),
+ ),
+ duration=DurationTuple(
+ PnY="1",
+ PnM="2",
+ PnW=None,
+ PnD="3",
+ TnH=None,
+ TnM=None,
+ TnS=None,
+ ),
+ )
+ ),
+ IntervalResolution.Seconds,
+ )
+
+ def test_get_interval_component_resolution(self):
+ self.assertEqual(
+ _get_interval_component_resolution(
+ DateTuple(YYYY="2001", MM=None, DD=None, Www=None, D=None, DDD="123")
+ ),
+ IntervalResolution.Ordinal,
+ )
+ self.assertEqual(
+ _get_interval_component_resolution(
+ DateTuple(YYYY="2001", MM=None, DD=None, Www="12", D="3", DDD=None)
+ ),
+ IntervalResolution.Weekday,
+ )
+ self.assertEqual(
+ _get_interval_component_resolution(
+ DateTuple(YYYY="2001", MM=None, DD=None, Www="12", D=None, DDD=None)
+ ),
+ IntervalResolution.Week,
+ )
+ self.assertEqual(
+ _get_interval_component_resolution(
+ DateTuple(YYYY="2001", MM="02", DD="03", Www=None, D=None, DDD=None)
+ ),
+ IntervalResolution.Day,
+ )
+ self.assertEqual(
+ _get_interval_component_resolution(
+ DateTuple(YYYY="2001", MM="02", DD=None, Www=None, D=None, DDD=None)
+ ),
+ IntervalResolution.Month,
+ )
+ self.assertEqual(
+ _get_interval_component_resolution(
+ DateTuple(YYYY="2001", MM=None, DD=None, Www=None, D=None, DDD=None)
+ ),
+ IntervalResolution.Year,
+ )
+
+ self.assertEqual(
+ _get_interval_component_resolution(
+ DatetimeTuple(
+ DateTuple(
+ YYYY="2001", MM="02", DD="03", Www=None, D=None, DDD=None
+ ),
+ TimeTuple(hh="04", mm="05", ss="06", tz=None),
+ )
+ ),
+ IntervalResolution.Seconds,
+ )
+ self.assertEqual(
+ _get_interval_component_resolution(
+ DatetimeTuple(
+ DateTuple(
+ YYYY="2001", MM="02", DD="03", Www=None, D=None, DDD=None
+ ),
+ TimeTuple(hh="04", mm="05", ss=None, tz=None),
+ )
+ ),
+ IntervalResolution.Minutes,
+ )
+ self.assertEqual(
+ _get_interval_component_resolution(
+ DatetimeTuple(
+ DateTuple(
+ YYYY="2001", MM="02", DD="03", Www=None, D=None, DDD=None
+ ),
+ TimeTuple(hh="04", mm=None, ss=None, tz=None),
+ )
+ ),
+ IntervalResolution.Hours,
+ )
+
+ self.assertEqual(
+ _get_interval_component_resolution(
+ DurationTuple(
+ PnY="1", PnM="2", PnW=None, PnD="3", TnH="4", TnM="5", TnS="6"
+ )
+ ),
+ IntervalResolution.Seconds,
+ )
+ self.assertEqual(
+ _get_interval_component_resolution(
+ DurationTuple(
+ PnY="1", PnM="2", PnW=None, PnD="3", TnH="4", TnM="5", TnS=None
+ )
+ ),
+ IntervalResolution.Minutes,
+ )
+ self.assertEqual(
+ _get_interval_component_resolution(
+ DurationTuple(
+ PnY="1", PnM="2", PnW=None, PnD="3", TnH="4", TnM=None, TnS=None
+ )
+ ),
+ IntervalResolution.Hours,
+ )
+ self.assertEqual(
+ _get_interval_component_resolution(
+ DurationTuple(
+ PnY="1", PnM="2", PnW=None, PnD="3", TnH=None, TnM=None, TnS=None
+ )
+ ),
+ IntervalResolution.Day,
+ )
+ self.assertEqual(
+ _get_interval_component_resolution(
+ DurationTuple(
+ PnY="1", PnM="2", PnW=None, PnD=None, TnH=None, TnM=None, TnS=None
+ )
+ ),
+ IntervalResolution.Month,
+ )
+ self.assertEqual(
+ _get_interval_component_resolution(
+ DurationTuple(
+ PnY="1", PnM=None, PnW=None, PnD=None, TnH=None, TnM=None, TnS=None
+ )
+ ),
+ IntervalResolution.Year,
+ )
+ self.assertEqual(
+ _get_interval_component_resolution(
+ DurationTuple(
+ PnY=None, PnM=None, PnW="3", PnD=None, TnH=None, TnM=None, TnS=None
+ )
+ ),
+ IntervalResolution.Week,
+ )
+
+
+class TestIntervalParserFunctions(unittest.TestCase):
+ def test_get_interval_resolution_date(self):
+ self.assertEqual(get_interval_resolution("P1.5Y/2018"), IntervalResolution.Year)
+ self.assertEqual(
+ get_interval_resolution("P1.5Y/2018-03"), IntervalResolution.Month
+ )
+ self.assertEqual(
+ get_interval_resolution("P1.5Y/2018-03-06"), IntervalResolution.Day
+ )
+ self.assertEqual(
+ get_interval_resolution("P1.5Y/2018W01"), IntervalResolution.Week
+ )
+ self.assertEqual(
+ get_interval_resolution("P1.5Y/2018-306"), IntervalResolution.Ordinal
+ )
+ self.assertEqual(
+ get_interval_resolution("P1.5Y/2018W012"), IntervalResolution.Weekday
+ )
+
+ self.assertEqual(get_interval_resolution("2018/P1.5Y"), IntervalResolution.Year)
+ self.assertEqual(
+ get_interval_resolution("2018-03/P1.5Y"), IntervalResolution.Month
+ )
+ self.assertEqual(
+ get_interval_resolution("2018-03-06/P1.5Y"), IntervalResolution.Day
+ )
+ self.assertEqual(
+ get_interval_resolution("2018W01/P1.5Y"), IntervalResolution.Week
+ )
+ self.assertEqual(
+ get_interval_resolution("2018-306/P1.5Y"), IntervalResolution.Ordinal
+ )
+ self.assertEqual(
+ get_interval_resolution("2018W012/P1.5Y"), IntervalResolution.Weekday
+ )
+
+ def test_get_interval_resolution_time(self):
+ self.assertEqual(
+ get_interval_resolution("P1M/1981-04-05T01"), IntervalResolution.Hours
+ )
+ self.assertEqual(
+ get_interval_resolution("P1M/1981-04-05T01:01"), IntervalResolution.Minutes
+ )
+ self.assertEqual(
+ get_interval_resolution("P1M/1981-04-05T01:01:00"),
+ IntervalResolution.Seconds,
+ )
+
+ self.assertEqual(
+ get_interval_resolution("1981-04-05T01/P1M"), IntervalResolution.Hours
+ )
+ self.assertEqual(
+ get_interval_resolution("1981-04-05T01:01/P1M"), IntervalResolution.Minutes
+ )
+ self.assertEqual(
+ get_interval_resolution("1981-04-05T01:01:00/P1M"),
+ IntervalResolution.Seconds,
+ )
+
+ def test_get_interval_resolution_duration(self):
+ self.assertEqual(
+ get_interval_resolution("2014-11-12/P1Y2M3D"), IntervalResolution.Day
+ )
+ self.assertEqual(
+ get_interval_resolution("2014-11-12/P1Y2M"), IntervalResolution.Day
+ )
+ self.assertEqual(
+ get_interval_resolution("2014-11-12/P1Y"), IntervalResolution.Day
+ )
+ self.assertEqual(
+ get_interval_resolution("2014-11-12/P1W"), IntervalResolution.Day
+ )
+ self.assertEqual(
+ get_interval_resolution("2014-11-12/P1Y2M3DT4H"), IntervalResolution.Hours
+ )
+ self.assertEqual(
+ get_interval_resolution("2014-11-12/P1Y2M3DT4H54M"),
+ IntervalResolution.Minutes,
+ )
+ self.assertEqual(
+ get_interval_resolution("2014-11-12/P1Y2M3DT4H54M6S"),
+ IntervalResolution.Seconds,
+ )
+
+ self.assertEqual(
+ get_interval_resolution("P1Y2M3D/2014-11-12"), IntervalResolution.Day
+ )
+ self.assertEqual(
+ get_interval_resolution("P1Y2M/2014-11-12"), IntervalResolution.Day
+ )
+ self.assertEqual(
+ get_interval_resolution("P1Y/2014-11-12"), IntervalResolution.Day
+ )
+ self.assertEqual(
+ get_interval_resolution("P1W/2014-11-12"), IntervalResolution.Day
+ )
+ self.assertEqual(
+ get_interval_resolution("P1Y2M3DT4H/2014-11-12"), IntervalResolution.Hours
+ )
+ self.assertEqual(
+ get_interval_resolution("P1Y2M3DT4H54M/2014-11-12"),
+ IntervalResolution.Minutes,
+ )
+ self.assertEqual(
+ get_interval_resolution("P1Y2M3DT4H54M6S/2014-11-12"),
+ IntervalResolution.Seconds,
+ )
+
+ def test_parse_interval(self):
+ testtuples = (
+ (
+ "P1M/1981-04-05T01:01:00",
+ {
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "duration": DurationTuple(None, "1", None, None, None, None, None),
+ },
+ ),
+ (
+ "P1M/1981-04-05",
+ {
+ "end": DateTuple("1981", "04", "05", None, None, None),
+ "duration": DurationTuple(None, "1", None, None, None, None, None),
+ },
+ ),
+ (
+ "P1,5Y/2018-03-06",
+ {
+ "end": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ "1.5", None, None, None, None, None, None
+ ),
+ },
+ ),
+ (
+ "P1.5Y/2018-03-06",
+ {
+ "end": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ "1.5", None, None, None, None, None, None
+ ),
+ },
+ ),
+ (
+ "PT1H/2014-11-12",
+ {
+ "end": DateTuple("2014", "11", "12", None, None, None),
+ "duration": DurationTuple(None, None, None, None, "1", None, None),
+ },
+ ),
+ (
+ "PT4H54M6.5S/2014-11-12",
+ {
+ "end": DateTuple("2014", "11", "12", None, None, None),
+ "duration": DurationTuple(None, None, None, None, "4", "54", "6.5"),
+ },
+ ),
+ (
+ "PT10H/2050-03-01T13:00:00Z",
+ {
+ "end": DatetimeTuple(
+ DateTuple("2050", "03", "01", None, None, None),
+ TimeTuple(
+ "13",
+ "00",
+ "00",
+ TimezoneTuple(False, True, None, None, "Z"),
+ ),
+ ),
+ "duration": DurationTuple(None, None, None, None, "10", None, None),
+ },
+ ),
+ # Make sure we truncate, not round
+ # https://bitbucket.org/nielsenb/aniso8601/issues/10/sub-microsecond-precision-in-durations-is
+ (
+ "PT0.0000001S/2018-03-06",
+ {
+ "end": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, None, None, "0.0000001"
+ ),
+ },
+ ),
+ (
+ "PT2.0000048S/2018-03-06",
+ {
+ "end": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, None, None, "2.0000048"
+ ),
+ },
+ ),
+ (
+ "1981-04-05T01:01:00/P1M1DT1M",
+ {
+ "start": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "duration": DurationTuple(None, "1", None, "1", None, "1", None),
+ },
+ ),
+ (
+ "1981-04-05/P1M1D",
+ {
+ "start": DateTuple("1981", "04", "05", None, None, None),
+ "duration": DurationTuple(None, "1", None, "1", None, None, None),
+ },
+ ),
+ (
+ "2018-03-06/P2,5M",
+ {
+ "start": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, "2.5", None, None, None, None, None
+ ),
+ },
+ ),
+ (
+ "2018-03-06/P2.5M",
+ {
+ "start": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, "2.5", None, None, None, None, None
+ ),
+ },
+ ),
+ (
+ "2014-11-12/PT1H",
+ {
+ "start": DateTuple("2014", "11", "12", None, None, None),
+ "duration": DurationTuple(None, None, None, None, "1", None, None),
+ },
+ ),
+ (
+ "2014-11-12/PT4H54M6.5S",
+ {
+ "start": DateTuple("2014", "11", "12", None, None, None),
+ "duration": DurationTuple(None, None, None, None, "4", "54", "6.5"),
+ },
+ ),
+ (
+ "2050-03-01T13:00:00Z/PT10H",
+ {
+ "start": DatetimeTuple(
+ DateTuple("2050", "03", "01", None, None, None),
+ TimeTuple(
+ "13",
+ "00",
+ "00",
+ TimezoneTuple(False, True, None, None, "Z"),
+ ),
+ ),
+ "duration": DurationTuple(None, None, None, None, "10", None, None),
+ },
+ ),
+ # Make sure we truncate, not round
+ # https://bitbucket.org/nielsenb/aniso8601/issues/10/sub-microsecond-precision-in-durations-is
+ (
+ "2018-03-06/PT0.0000001S",
+ {
+ "start": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, None, None, "0.0000001"
+ ),
+ },
+ ),
+ (
+ "2018-03-06/PT2.0000048S",
+ {
+ "start": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, None, None, "2.0000048"
+ ),
+ },
+ ),
+ (
+ "1980-03-05T01:01:00/1981-04-05T01:01:00",
+ {
+ "start": DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ },
+ ),
+ (
+ "1980-03-05T01:01:00/1981-04-05",
+ {
+ "start": DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "end": DateTuple("1981", "04", "05", None, None, None),
+ },
+ ),
+ (
+ "1980-03-05/1981-04-05T01:01:00",
+ {
+ "start": DateTuple("1980", "03", "05", None, None, None),
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ },
+ ),
+ (
+ "1980-03-05/1981-04-05",
+ {
+ "start": DateTuple("1980", "03", "05", None, None, None),
+ "end": DateTuple("1981", "04", "05", None, None, None),
+ },
+ ),
+ (
+ "1981-04-05/1980-03-05",
+ {
+ "start": DateTuple("1981", "04", "05", None, None, None),
+ "end": DateTuple("1980", "03", "05", None, None, None),
+ },
+ ),
+ (
+ "2050-03-01T13:00:00Z/2050-05-11T15:30:00Z",
+ {
+ "start": DatetimeTuple(
+ DateTuple("2050", "03", "01", None, None, None),
+ TimeTuple(
+ "13",
+ "00",
+ "00",
+ TimezoneTuple(False, True, None, None, "Z"),
+ ),
+ ),
+ "end": DatetimeTuple(
+ DateTuple("2050", "05", "11", None, None, None),
+ TimeTuple(
+ "15",
+ "30",
+ "00",
+ TimezoneTuple(False, True, None, None, "Z"),
+ ),
+ ),
+ },
+ ),
+ # Test concise interval
+ (
+ "2020-01-01/02",
+ {
+ "start": DateTuple("2020", "01", "01", None, None, None),
+ "end": DateTuple(None, None, "02", None, None, None),
+ },
+ ),
+ (
+ "2008-02-15/03-14",
+ {
+ "start": DateTuple("2008", "02", "15", None, None, None),
+ "end": DateTuple(None, "03", "14", None, None, None),
+ },
+ ),
+ (
+ "2007-12-14T13:30/15:30",
+ {
+ "start": DatetimeTuple(
+ DateTuple("2007", "12", "14", None, None, None),
+ TimeTuple("13", "30", None, None),
+ ),
+ "end": TimeTuple("15", "30", None, None),
+ },
+ ),
+ (
+ "2007-11-13T09:00/15T17:00",
+ {
+ "start": DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple("09", "00", None, None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple(None, None, "15", None, None, None),
+ TimeTuple("17", "00", None, None),
+ ),
+ },
+ ),
+ (
+ "2007-11-13T00:00/16T00:00",
+ {
+ "start": DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple("00", "00", None, None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple(None, None, "16", None, None, None),
+ TimeTuple("00", "00", None, None),
+ ),
+ },
+ ),
+ (
+ "2007-11-13T09:00Z/15T17:00",
+ {
+ "start": DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple(
+ "09",
+ "00",
+ None,
+ TimezoneTuple(False, True, None, None, "Z"),
+ ),
+ ),
+ "end": DatetimeTuple(
+ DateTuple(None, None, "15", None, None, None),
+ TimeTuple("17", "00", None, None),
+ ),
+ },
+ ),
+ (
+ "2007-11-13T00:00/12:34.567",
+ {
+ "start": DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple("00", "00", None, None),
+ ),
+ "end": TimeTuple("12", "34.567", None, None),
+ },
+ ),
+ # Make sure we truncate, not round
+ # https://bitbucket.org/nielsenb/aniso8601/issues/10/sub-microsecond-precision-in-durations-is
+ (
+ "1980-03-05T01:01:00.0000001/" "1981-04-05T14:43:59.9999997",
+ {
+ "start": DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00.0000001", None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("14", "43", "59.9999997", None),
+ ),
+ },
+ ),
+ )
+
+ for testtuple in testtuples:
+ with mock.patch.object(
+ aniso8601.interval.PythonTimeBuilder, "build_interval"
+ ) as mockBuildInterval:
+ mockBuildInterval.return_value = testtuple[1]
+
+ result = parse_interval(testtuple[0])
+
+ self.assertEqual(result, testtuple[1])
+ mockBuildInterval.assert_called_once_with(**testtuple[1])
+
+ # Test different separators
+ with mock.patch.object(
+ aniso8601.interval.PythonTimeBuilder, "build_interval"
+ ) as mockBuildInterval:
+ expectedargs = {
+ "start": DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ }
+
+ mockBuildInterval.return_value = expectedargs
+
+ result = parse_interval(
+ "1980-03-05T01:01:00--1981-04-05T01:01:00", intervaldelimiter="--"
+ )
+
+ self.assertEqual(result, expectedargs)
+ mockBuildInterval.assert_called_once_with(**expectedargs)
+
+ with mock.patch.object(
+ aniso8601.interval.PythonTimeBuilder, "build_interval"
+ ) as mockBuildInterval:
+ expectedargs = {
+ "start": DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ }
+
+ mockBuildInterval.return_value = expectedargs
+
+ result = parse_interval(
+ "1980-03-05 01:01:00/1981-04-05 01:01:00", datetimedelimiter=" "
+ )
+
+ self.assertEqual(result, expectedargs)
+ mockBuildInterval.assert_called_once_with(**expectedargs)
+
+ def test_parse_interval_mockbuilder(self):
+ mockBuilder = mock.Mock()
+
+ expectedargs = {
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "duration": DurationTuple(None, "1", None, None, None, None, None),
+ }
+
+ mockBuilder.build_interval.return_value = expectedargs
+
+ result = parse_interval("P1M/1981-04-05T01:01:00", builder=mockBuilder)
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.build_interval.assert_called_once_with(**expectedargs)
+
+ mockBuilder = mock.Mock()
+
+ expectedargs = {
+ "start": DateTuple("2014", "11", "12", None, None, None),
+ "duration": DurationTuple(None, None, None, None, "1", None, None),
+ }
+
+ mockBuilder.build_interval.return_value = expectedargs
+
+ result = parse_interval("2014-11-12/PT1H", builder=mockBuilder)
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.build_interval.assert_called_once_with(**expectedargs)
+
+ mockBuilder = mock.Mock()
+
+ expectedargs = {
+ "start": DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ }
+
+ mockBuilder.build_interval.return_value = expectedargs
+
+ result = parse_interval(
+ "1980-03-05T01:01:00/1981-04-05T01:01:00", builder=mockBuilder
+ )
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.build_interval.assert_called_once_with(**expectedargs)
+
+ def test_parse_interval_badtype(self):
+ testtuples = (None, 1, False, 1.234)
+
+ for testtuple in testtuples:
+ with self.assertRaises(ValueError):
+ parse_interval(testtuple, builder=None)
+
+ def test_parse_interval_baddelimiter(self):
+ testtuples = (
+ "1980-03-05T01:01:00,1981-04-05T01:01:00",
+ "P1M 1981-04-05T01:01:00",
+ )
+
+ for testtuple in testtuples:
+ with self.assertRaises(ISOFormatError):
+ parse_interval(testtuple, builder=None)
+
+ def test_parse_interval_badstr(self):
+ testtuples = ("/", "0/0/0", "20.50230/0", "5/%", "1/21", "bad", "")
+
+ for testtuple in testtuples:
+ with self.assertRaises(ISOFormatError):
+ parse_interval(testtuple, builder=None)
+
+ def test_parse_interval_repeating(self):
+ # Parse interval can't parse repeating intervals
+ with self.assertRaises(ISOFormatError):
+ parse_interval("R3/1981-04-05/P1D")
+
+ with self.assertRaises(ISOFormatError):
+ parse_interval("R3/1981-04-05/P0003-06-04T12:30:05.5")
+
+ with self.assertRaises(ISOFormatError):
+ parse_interval("R/PT1H2M/1980-03-05T01:01:00")
+
+ def test_parse_interval_suffixgarbage(self):
+ # Don't allow garbage after the duration
+ # https://bitbucket.org/nielsenb/aniso8601/issues/9/durations-with-trailing-garbage-are-parsed
+ with self.assertRaises(ISOFormatError):
+ parse_interval("2001/P1Dasdf", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_interval("P1Dasdf/2001", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_interval("2001/P0003-06-04T12:30:05.5asdfasdf", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_interval("P0003-06-04T12:30:05.5asdfasdf/2001", builder=None)
+
+ def test_parse_interval_internal(self):
+ # Test the internal _parse_interval function
+ testtuples = (
+ (
+ "P1M/1981-04-05T01:01:00",
+ {
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "duration": DurationTuple(None, "1", None, None, None, None, None),
+ },
+ ),
+ (
+ "P1M/1981-04-05",
+ {
+ "end": DateTuple("1981", "04", "05", None, None, None),
+ "duration": DurationTuple(None, "1", None, None, None, None, None),
+ },
+ ),
+ (
+ "P1,5Y/2018-03-06",
+ {
+ "end": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ "1.5", None, None, None, None, None, None
+ ),
+ },
+ ),
+ (
+ "P1.5Y/2018-03-06",
+ {
+ "end": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ "1.5", None, None, None, None, None, None
+ ),
+ },
+ ),
+ (
+ "PT1H/2014-11-12",
+ {
+ "end": DateTuple("2014", "11", "12", None, None, None),
+ "duration": DurationTuple(None, None, None, None, "1", None, None),
+ },
+ ),
+ (
+ "PT4H54M6.5S/2014-11-12",
+ {
+ "end": DateTuple("2014", "11", "12", None, None, None),
+ "duration": DurationTuple(None, None, None, None, "4", "54", "6.5"),
+ },
+ ),
+ # Make sure we truncate, not round
+ # https://bitbucket.org/nielsenb/aniso8601/issues/10/sub-microsecond-precision-in-durations-is
+ (
+ "PT0.0000001S/2018-03-06",
+ {
+ "end": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, None, None, "0.0000001"
+ ),
+ },
+ ),
+ (
+ "PT2.0000048S/2018-03-06",
+ {
+ "end": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, None, None, "2.0000048"
+ ),
+ },
+ ),
+ (
+ "1981-04-05T01:01:00/P1M1DT1M",
+ {
+ "start": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "duration": DurationTuple(None, "1", None, "1", None, "1", None),
+ },
+ ),
+ (
+ "1981-04-05/P1M1D",
+ {
+ "start": DateTuple("1981", "04", "05", None, None, None),
+ "duration": DurationTuple(None, "1", None, "1", None, None, None),
+ },
+ ),
+ (
+ "2018-03-06/P2,5M",
+ {
+ "start": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, "2.5", None, None, None, None, None
+ ),
+ },
+ ),
+ (
+ "2018-03-06/P2.5M",
+ {
+ "start": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, "2.5", None, None, None, None, None
+ ),
+ },
+ ),
+ (
+ "2014-11-12/PT1H",
+ {
+ "start": DateTuple("2014", "11", "12", None, None, None),
+ "duration": DurationTuple(None, None, None, None, "1", None, None),
+ },
+ ),
+ (
+ "2014-11-12/PT4H54M6.5S",
+ {
+ "start": DateTuple("2014", "11", "12", None, None, None),
+ "duration": DurationTuple(None, None, None, None, "4", "54", "6.5"),
+ },
+ ),
+ # Make sure we truncate, not round
+ # https://bitbucket.org/nielsenb/aniso8601/issues/10/sub-microsecond-precision-in-durations-is
+ (
+ "2018-03-06/PT0.0000001S",
+ {
+ "start": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, None, None, "0.0000001"
+ ),
+ },
+ ),
+ (
+ "2018-03-06/PT2.0000048S",
+ {
+ "start": DateTuple("2018", "03", "06", None, None, None),
+ "duration": DurationTuple(
+ None, None, None, None, None, None, "2.0000048"
+ ),
+ },
+ ),
+ (
+ "1980-03-05T01:01:00/1981-04-05T01:01:00",
+ {
+ "start": DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ },
+ ),
+ (
+ "1980-03-05T01:01:00/1981-04-05",
+ {
+ "start": DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "end": DateTuple("1981", "04", "05", None, None, None),
+ },
+ ),
+ (
+ "1980-03-05/1981-04-05T01:01:00",
+ {
+ "start": DateTuple("1980", "03", "05", None, None, None),
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ },
+ ),
+ (
+ "1980-03-05/1981-04-05",
+ {
+ "start": DateTuple("1980", "03", "05", None, None, None),
+ "end": DateTuple("1981", "04", "05", None, None, None),
+ },
+ ),
+ (
+ "1981-04-05/1980-03-05",
+ {
+ "start": DateTuple("1981", "04", "05", None, None, None),
+ "end": DateTuple("1980", "03", "05", None, None, None),
+ },
+ ),
+ # Test concise interval
+ (
+ "2020-01-01/02",
+ {
+ "start": DateTuple("2020", "01", "01", None, None, None),
+ "end": DateTuple(None, None, "02", None, None, None),
+ },
+ ),
+ (
+ "2008-02-15/03-14",
+ {
+ "start": DateTuple("2008", "02", "15", None, None, None),
+ "end": DateTuple(None, "03", "14", None, None, None),
+ },
+ ),
+ (
+ "2007-12-14T13:30/15:30",
+ {
+ "start": DatetimeTuple(
+ DateTuple("2007", "12", "14", None, None, None),
+ TimeTuple("13", "30", None, None),
+ ),
+ "end": TimeTuple("15", "30", None, None),
+ },
+ ),
+ (
+ "2007-11-13T09:00/15T17:00",
+ {
+ "start": DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple("09", "00", None, None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple(None, None, "15", None, None, None),
+ TimeTuple("17", "00", None, None),
+ ),
+ },
+ ),
+ (
+ "2007-11-13T00:00/16T00:00",
+ {
+ "start": DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple("00", "00", None, None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple(None, None, "16", None, None, None),
+ TimeTuple("00", "00", None, None),
+ ),
+ },
+ ),
+ (
+ "2007-11-13T09:00Z/15T17:00",
+ {
+ "start": DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple(
+ "09",
+ "00",
+ None,
+ TimezoneTuple(False, True, None, None, "Z"),
+ ),
+ ),
+ "end": DatetimeTuple(
+ DateTuple(None, None, "15", None, None, None),
+ TimeTuple("17", "00", None, None),
+ ),
+ },
+ ),
+ (
+ "2007-11-13T00:00/12:34.567",
+ {
+ "start": DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple("00", "00", None, None),
+ ),
+ "end": TimeTuple("12", "34.567", None, None),
+ },
+ ),
+ # Make sure we truncate, not round
+ # https://bitbucket.org/nielsenb/aniso8601/issues/10/sub-microsecond-precision-in-durations-is
+ (
+ "1980-03-05T01:01:00.0000001/" "1981-04-05T14:43:59.9999997",
+ {
+ "start": DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00.0000001", None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("14", "43", "59.9999997", None),
+ ),
+ },
+ ),
+ )
+
+ for testtuple in testtuples:
+ mockBuilder = mock.Mock()
+ mockBuilder.build_interval.return_value = testtuple[1]
+
+ result = _parse_interval(testtuple[0], mockBuilder)
+
+ self.assertEqual(result, testtuple[1])
+ mockBuilder.build_interval.assert_called_once_with(**testtuple[1])
+
+ # Test different separators
+ expectedargs = {
+ "start": DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ }
+
+ mockBuilder = mock.Mock()
+ mockBuilder.build_interval.return_value = expectedargs
+
+ result = _parse_interval(
+ "1980-03-05T01:01:00--1981-04-05T01:01:00",
+ mockBuilder,
+ intervaldelimiter="--",
+ )
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.build_interval.assert_called_once_with(**expectedargs)
+
+ expectedargs = {
+ "start": DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ "end": DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ }
+
+ mockBuilder = mock.Mock()
+ mockBuilder.build_interval.return_value = expectedargs
+
+ _parse_interval(
+ "1980-03-05 01:01:00/1981-04-05 01:01:00",
+ mockBuilder,
+ datetimedelimiter=" ",
+ )
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.build_interval.assert_called_once_with(**expectedargs)
+
+ def test_parse_interval_end(self):
+ self.assertEqual(
+ _parse_interval_end(
+ "02", DateTuple("2020", "01", "01", None, None, None), "T"
+ ),
+ DateTuple(None, None, "02", None, None, None),
+ )
+
+ self.assertEqual(
+ _parse_interval_end(
+ "03-14", DateTuple("2008", "02", "15", None, None, None), "T"
+ ),
+ DateTuple(None, "03", "14", None, None, None),
+ )
+
+ self.assertEqual(
+ _parse_interval_end(
+ "0314", DateTuple("2008", "02", "15", None, None, None), "T"
+ ),
+ DateTuple(None, "03", "14", None, None, None),
+ )
+
+ self.assertEqual(
+ _parse_interval_end(
+ "15:30",
+ DatetimeTuple(
+ DateTuple("2007", "12", "14", None, None, None),
+ TimeTuple("13", "30", None, None),
+ ),
+ "T",
+ ),
+ TimeTuple("15", "30", None, None),
+ )
+
+ self.assertEqual(
+ _parse_interval_end(
+ "15T17:00",
+ DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple("09", "00", None, None),
+ ),
+ "T",
+ ),
+ DatetimeTuple(
+ DateTuple(None, None, "15", None, None, None),
+ TimeTuple("17", "00", None, None),
+ ),
+ )
+
+ self.assertEqual(
+ _parse_interval_end(
+ "16T00:00",
+ DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple("00", "00", None, None),
+ ),
+ "T",
+ ),
+ DatetimeTuple(
+ DateTuple(None, None, "16", None, None, None),
+ TimeTuple("00", "00", None, None),
+ ),
+ )
+
+ self.assertEqual(
+ _parse_interval_end(
+ "15 17:00",
+ DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple("09", "00", None, None),
+ ),
+ " ",
+ ),
+ DatetimeTuple(
+ DateTuple(None, None, "15", None, None, None),
+ TimeTuple("17", "00", None, None),
+ ),
+ )
+
+ self.assertEqual(
+ _parse_interval_end(
+ "12:34.567",
+ DatetimeTuple(
+ DateTuple("2007", "11", "13", None, None, None),
+ TimeTuple("00", "00", None, None),
+ ),
+ "T",
+ ),
+ TimeTuple("12", "34.567", None, None),
+ )
+
+
+class TestRepeatingIntervalParserFunctions(unittest.TestCase):
+ def test_get_interval_resolution_date(self):
+ self.assertEqual(
+ get_repeating_interval_resolution("R/P1.5Y/2018"), IntervalResolution.Year
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R1/P1.5Y/2018-03"),
+ IntervalResolution.Month,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R2/P1.5Y/2018-03-06"),
+ IntervalResolution.Day,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R3/P1.5Y/2018W01"),
+ IntervalResolution.Week,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R4/P1.5Y/2018-306"),
+ IntervalResolution.Ordinal,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R5/P1.5Y/2018W012"),
+ IntervalResolution.Weekday,
+ )
+
+ self.assertEqual(
+ get_repeating_interval_resolution("R/2018/P1.5Y"), IntervalResolution.Year
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R1/2018-03/P1.5Y"),
+ IntervalResolution.Month,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R2/2018-03-06/P1.5Y"),
+ IntervalResolution.Day,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R3/2018W01/P1.5Y"),
+ IntervalResolution.Week,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R4/2018-306/P1.5Y"),
+ IntervalResolution.Ordinal,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R5/2018W012/P1.5Y"),
+ IntervalResolution.Weekday,
+ )
+
+ def test_get_interval_resolution_time(self):
+ self.assertEqual(
+ get_repeating_interval_resolution("R/P1M/1981-04-05T01"),
+ IntervalResolution.Hours,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R1/P1M/1981-04-05T01:01"),
+ IntervalResolution.Minutes,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R2/P1M/1981-04-05T01:01:00"),
+ IntervalResolution.Seconds,
+ )
+
+ self.assertEqual(
+ get_repeating_interval_resolution("R/1981-04-05T01/P1M"),
+ IntervalResolution.Hours,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R1/1981-04-05T01:01/P1M"),
+ IntervalResolution.Minutes,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R2/1981-04-05T01:01:00/P1M"),
+ IntervalResolution.Seconds,
+ )
+
+ def test_get_interval_resolution_duration(self):
+ self.assertEqual(
+ get_repeating_interval_resolution("R/2014-11-12/P1Y2M3D"),
+ IntervalResolution.Day,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R1/2014-11-12/P1Y2M"),
+ IntervalResolution.Day,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R2/2014-11-12/P1Y"),
+ IntervalResolution.Day,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R3/2014-11-12/P1W"),
+ IntervalResolution.Day,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R4/2014-11-12/P1Y2M3DT4H"),
+ IntervalResolution.Hours,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R5/2014-11-12/P1Y2M3DT4H54M"),
+ IntervalResolution.Minutes,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R6/2014-11-12/P1Y2M3DT4H54M6S"),
+ IntervalResolution.Seconds,
+ )
+
+ self.assertEqual(
+ get_repeating_interval_resolution("R/P1Y2M3D/2014-11-12"),
+ IntervalResolution.Day,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R1/P1Y2M/2014-11-12"),
+ IntervalResolution.Day,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R2/P1Y/2014-11-12"),
+ IntervalResolution.Day,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R3/P1W/2014-11-12"),
+ IntervalResolution.Day,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R4/P1Y2M3DT4H/2014-11-12"),
+ IntervalResolution.Hours,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R5/P1Y2M3DT4H54M/2014-11-12"),
+ IntervalResolution.Minutes,
+ )
+ self.assertEqual(
+ get_repeating_interval_resolution("R6/P1Y2M3DT4H54M6S/2014-11-12"),
+ IntervalResolution.Seconds,
+ )
+
+ def test_parse_repeating_interval(self):
+ with mock.patch.object(
+ aniso8601.interval.PythonTimeBuilder, "build_repeating_interval"
+ ) as mockBuilder:
+ expectedargs = {
+ "R": False,
+ "Rnn": "3",
+ "interval": IntervalTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ None,
+ DurationTuple(None, None, None, "1", None, None, None),
+ ),
+ }
+
+ mockBuilder.return_value = expectedargs
+
+ result = parse_repeating_interval("R3/1981-04-05/P1D")
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.assert_called_once_with(**expectedargs)
+
+ with mock.patch.object(
+ aniso8601.interval.PythonTimeBuilder, "build_repeating_interval"
+ ) as mockBuilder:
+ expectedargs = {
+ "R": False,
+ "Rnn": "11",
+ "interval": IntervalTuple(
+ None,
+ DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ DurationTuple(None, None, None, None, "1", "2", None),
+ ),
+ }
+
+ mockBuilder.return_value = expectedargs
+
+ result = parse_repeating_interval("R11/PT1H2M/1980-03-05T01:01:00")
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.assert_called_once_with(**expectedargs)
+
+ with mock.patch.object(
+ aniso8601.interval.PythonTimeBuilder, "build_repeating_interval"
+ ) as mockBuilder:
+ expectedargs = {
+ "R": False,
+ "Rnn": "2",
+ "interval": IntervalTuple(
+ DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ None,
+ ),
+ }
+
+ mockBuilder.return_value = expectedargs
+
+ result = parse_repeating_interval(
+ "R2--1980-03-05T01:01:00--" "1981-04-05T01:01:00",
+ intervaldelimiter="--",
+ )
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.assert_called_once_with(**expectedargs)
+
+ with mock.patch.object(
+ aniso8601.interval.PythonTimeBuilder, "build_repeating_interval"
+ ) as mockBuilder:
+ expectedargs = {
+ "R": False,
+ "Rnn": "2",
+ "interval": IntervalTuple(
+ DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ DatetimeTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ None,
+ ),
+ }
+
+ mockBuilder.return_value = expectedargs
+
+ result = parse_repeating_interval(
+ "R2/" "1980-03-05 01:01:00/" "1981-04-05 01:01:00",
+ datetimedelimiter=" ",
+ )
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.assert_called_once_with(**expectedargs)
+
+ with mock.patch.object(
+ aniso8601.interval.PythonTimeBuilder, "build_repeating_interval"
+ ) as mockBuilder:
+ expectedargs = {
+ "R": True,
+ "Rnn": None,
+ "interval": IntervalTuple(
+ None,
+ DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ DurationTuple(None, None, None, None, "1", "2", None),
+ ),
+ }
+
+ mockBuilder.return_value = expectedargs
+
+ result = parse_repeating_interval("R/PT1H2M/1980-03-05T01:01:00")
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.assert_called_once_with(**expectedargs)
+
+ def test_parse_repeating_interval_mockbuilder(self):
+ mockBuilder = mock.Mock()
+
+ args = {
+ "R": False,
+ "Rnn": "3",
+ "interval": IntervalTuple(
+ DateTuple("1981", "04", "05", None, None, None),
+ None,
+ DurationTuple(None, None, None, "1", None, None, None),
+ ),
+ }
+
+ mockBuilder.build_repeating_interval.return_value = args
+
+ result = parse_repeating_interval("R3/1981-04-05/P1D", builder=mockBuilder)
+
+ self.assertEqual(result, args)
+ mockBuilder.build_repeating_interval.assert_called_once_with(**args)
+
+ mockBuilder = mock.Mock()
+
+ args = {
+ "R": False,
+ "Rnn": "11",
+ "interval": IntervalTuple(
+ None,
+ DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ DurationTuple(None, None, None, None, "1", "2", None),
+ ),
+ }
+
+ mockBuilder.build_repeating_interval.return_value = args
+
+ result = parse_repeating_interval(
+ "R11/PT1H2M/1980-03-05T01:01:00", builder=mockBuilder
+ )
+
+ self.assertEqual(result, args)
+ mockBuilder.build_repeating_interval.assert_called_once_with(**args)
+
+ mockBuilder = mock.Mock()
+
+ args = {
+ "R": True,
+ "Rnn": None,
+ "interval": IntervalTuple(
+ None,
+ DatetimeTuple(
+ DateTuple("1980", "03", "05", None, None, None),
+ TimeTuple("01", "01", "00", None),
+ ),
+ DurationTuple(None, None, None, None, "1", "2", None),
+ ),
+ }
+
+ mockBuilder.build_repeating_interval.return_value = args
+
+ result = parse_repeating_interval(
+ "R/PT1H2M/1980-03-05T01:01:00", builder=mockBuilder
+ )
+
+ self.assertEqual(result, args)
+ mockBuilder.build_repeating_interval.assert_called_once_with(**args)
+
+ def test_parse_repeating_interval_badtype(self):
+ testtuples = (None, 1, False, 1.234)
+
+ for testtuple in testtuples:
+ with self.assertRaises(ValueError):
+ parse_repeating_interval(testtuple, builder=None)
+
+ def test_parse_repeating_interval_baddelimiter(self):
+ testtuples = ("R,PT1H2M,1980-03-05T01:01:00", "R3 1981-04-05 P1D")
+
+ for testtuple in testtuples:
+ with self.assertRaises(ISOFormatError):
+ parse_repeating_interval(testtuple, builder=None)
+
+ def test_parse_repeating_interval_suffixgarbage(self):
+ # Don't allow garbage after the duration
+ # https://bitbucket.org/nielsenb/aniso8601/issues/9/durations-with-trailing-garbage-are-parsed
+ with self.assertRaises(ISOFormatError):
+ parse_repeating_interval("R3/1981-04-05/P1Dasdf", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_repeating_interval(
+ "R3/" "1981-04-05/" "P0003-06-04T12:30:05.5asdfasdf", builder=None
+ )
+
+ def test_parse_repeating_interval_badstr(self):
+ testtuples = ("bad", "")
+
+ for testtuple in testtuples:
+ with self.assertRaises(ISOFormatError):
+ parse_repeating_interval(testtuple, builder=None)
diff --git a/libs/aniso8601/tests/test_time.py b/libs/aniso8601/tests/test_time.py
new file mode 100644
index 000000000..dcee5e03f
--- /dev/null
+++ b/libs/aniso8601/tests/test_time.py
@@ -0,0 +1,539 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+import unittest
+
+import aniso8601
+from aniso8601.builders import DatetimeTuple, DateTuple, TimeTuple, TimezoneTuple
+from aniso8601.exceptions import ISOFormatError
+from aniso8601.resolution import TimeResolution
+from aniso8601.tests.compat import mock
+from aniso8601.time import (
+ _get_time_resolution,
+ get_datetime_resolution,
+ get_time_resolution,
+ parse_datetime,
+ parse_time,
+)
+
+
+class TestTimeResolutionFunctions(unittest.TestCase):
+ def test_get_time_resolution(self):
+ self.assertEqual(get_time_resolution("01:23:45"), TimeResolution.Seconds)
+ self.assertEqual(get_time_resolution("24:00:00"), TimeResolution.Seconds)
+ self.assertEqual(get_time_resolution("23:21:28,512400"), TimeResolution.Seconds)
+ self.assertEqual(get_time_resolution("23:21:28.512400"), TimeResolution.Seconds)
+ self.assertEqual(get_time_resolution("01:23"), TimeResolution.Minutes)
+ self.assertEqual(get_time_resolution("24:00"), TimeResolution.Minutes)
+ self.assertEqual(get_time_resolution("01:23,4567"), TimeResolution.Minutes)
+ self.assertEqual(get_time_resolution("01:23.4567"), TimeResolution.Minutes)
+ self.assertEqual(get_time_resolution("012345"), TimeResolution.Seconds)
+ self.assertEqual(get_time_resolution("240000"), TimeResolution.Seconds)
+ self.assertEqual(get_time_resolution("0123"), TimeResolution.Minutes)
+ self.assertEqual(get_time_resolution("2400"), TimeResolution.Minutes)
+ self.assertEqual(get_time_resolution("01"), TimeResolution.Hours)
+ self.assertEqual(get_time_resolution("24"), TimeResolution.Hours)
+ self.assertEqual(get_time_resolution("12,5"), TimeResolution.Hours)
+ self.assertEqual(get_time_resolution("12.5"), TimeResolution.Hours)
+ self.assertEqual(
+ get_time_resolution("232128.512400+00:00"), TimeResolution.Seconds
+ )
+ self.assertEqual(get_time_resolution("0123.4567+00:00"), TimeResolution.Minutes)
+ self.assertEqual(get_time_resolution("01.4567+00:00"), TimeResolution.Hours)
+ self.assertEqual(get_time_resolution("01:23:45+00:00"), TimeResolution.Seconds)
+ self.assertEqual(get_time_resolution("24:00:00+00:00"), TimeResolution.Seconds)
+ self.assertEqual(
+ get_time_resolution("23:21:28.512400+00:00"), TimeResolution.Seconds
+ )
+ self.assertEqual(get_time_resolution("01:23+00:00"), TimeResolution.Minutes)
+ self.assertEqual(get_time_resolution("24:00+00:00"), TimeResolution.Minutes)
+ self.assertEqual(
+ get_time_resolution("01:23.4567+00:00"), TimeResolution.Minutes
+ )
+ self.assertEqual(
+ get_time_resolution("23:21:28.512400+11:15"), TimeResolution.Seconds
+ )
+ self.assertEqual(
+ get_time_resolution("23:21:28.512400-12:34"), TimeResolution.Seconds
+ )
+ self.assertEqual(
+ get_time_resolution("23:21:28.512400Z"), TimeResolution.Seconds
+ )
+ self.assertEqual(
+ get_time_resolution("06:14:00.000123Z"), TimeResolution.Seconds
+ )
+
+ def test_get_datetime_resolution(self):
+ self.assertEqual(
+ get_datetime_resolution("2019-06-05T01:03:11.858714"),
+ TimeResolution.Seconds,
+ )
+ self.assertEqual(
+ get_datetime_resolution("2019-06-05T01:03:11"), TimeResolution.Seconds
+ )
+ self.assertEqual(
+ get_datetime_resolution("2019-06-05T01:03"), TimeResolution.Minutes
+ )
+ self.assertEqual(get_datetime_resolution("2019-06-05T01"), TimeResolution.Hours)
+
+ def test_get_time_resolution_badtype(self):
+ testtuples = (None, 1, False, 1.234)
+
+ for testtuple in testtuples:
+ with self.assertRaises(ValueError):
+ get_time_resolution(testtuple)
+
+ def test_get_time_resolution_badstr(self):
+ testtuples = ("A6:14:00.000123Z", "06:14:0B", "bad", "")
+
+ for testtuple in testtuples:
+ with self.assertRaises(ISOFormatError):
+ get_time_resolution(testtuple)
+
+ def test_get_time_resolution_internal(self):
+ self.assertEqual(
+ _get_time_resolution(TimeTuple(hh="01", mm="02", ss="03", tz=None)),
+ TimeResolution.Seconds,
+ )
+ self.assertEqual(
+ _get_time_resolution(TimeTuple(hh="01", mm="02", ss=None, tz=None)),
+ TimeResolution.Minutes,
+ )
+ self.assertEqual(
+ _get_time_resolution(TimeTuple(hh="01", mm=None, ss=None, tz=None)),
+ TimeResolution.Hours,
+ )
+
+
+class TestTimeParserFunctions(unittest.TestCase):
+ def test_parse_time(self):
+ testtuples = (
+ ("01:23:45", {"hh": "01", "mm": "23", "ss": "45", "tz": None}),
+ ("24:00:00", {"hh": "24", "mm": "00", "ss": "00", "tz": None}),
+ (
+ "23:21:28,512400",
+ {"hh": "23", "mm": "21", "ss": "28.512400", "tz": None},
+ ),
+ (
+ "23:21:28.512400",
+ {"hh": "23", "mm": "21", "ss": "28.512400", "tz": None},
+ ),
+ (
+ "01:03:11.858714",
+ {"hh": "01", "mm": "03", "ss": "11.858714", "tz": None},
+ ),
+ (
+ "14:43:59.9999997",
+ {"hh": "14", "mm": "43", "ss": "59.9999997", "tz": None},
+ ),
+ ("01:23", {"hh": "01", "mm": "23", "ss": None, "tz": None}),
+ ("24:00", {"hh": "24", "mm": "00", "ss": None, "tz": None}),
+ ("01:23,4567", {"hh": "01", "mm": "23.4567", "ss": None, "tz": None}),
+ ("01:23.4567", {"hh": "01", "mm": "23.4567", "ss": None, "tz": None}),
+ ("012345", {"hh": "01", "mm": "23", "ss": "45", "tz": None}),
+ ("240000", {"hh": "24", "mm": "00", "ss": "00", "tz": None}),
+ ("232128,512400", {"hh": "23", "mm": "21", "ss": "28.512400", "tz": None}),
+ ("232128.512400", {"hh": "23", "mm": "21", "ss": "28.512400", "tz": None}),
+ ("010311.858714", {"hh": "01", "mm": "03", "ss": "11.858714", "tz": None}),
+ (
+ "144359.9999997",
+ {"hh": "14", "mm": "43", "ss": "59.9999997", "tz": None},
+ ),
+ ("0123", {"hh": "01", "mm": "23", "ss": None, "tz": None}),
+ ("2400", {"hh": "24", "mm": "00", "ss": None, "tz": None}),
+ ("01", {"hh": "01", "mm": None, "ss": None, "tz": None}),
+ ("24", {"hh": "24", "mm": None, "ss": None, "tz": None}),
+ ("12,5", {"hh": "12.5", "mm": None, "ss": None, "tz": None}),
+ ("12.5", {"hh": "12.5", "mm": None, "ss": None, "tz": None}),
+ (
+ "232128,512400+00:00",
+ {
+ "hh": "23",
+ "mm": "21",
+ "ss": "28.512400",
+ "tz": TimezoneTuple(False, None, "00", "00", "+00:00"),
+ },
+ ),
+ (
+ "232128.512400+00:00",
+ {
+ "hh": "23",
+ "mm": "21",
+ "ss": "28.512400",
+ "tz": TimezoneTuple(False, None, "00", "00", "+00:00"),
+ },
+ ),
+ (
+ "0123,4567+00:00",
+ {
+ "hh": "01",
+ "mm": "23.4567",
+ "ss": None,
+ "tz": TimezoneTuple(False, None, "00", "00", "+00:00"),
+ },
+ ),
+ (
+ "0123.4567+00:00",
+ {
+ "hh": "01",
+ "mm": "23.4567",
+ "ss": None,
+ "tz": TimezoneTuple(False, None, "00", "00", "+00:00"),
+ },
+ ),
+ (
+ "01,4567+00:00",
+ {
+ "hh": "01.4567",
+ "mm": None,
+ "ss": None,
+ "tz": TimezoneTuple(False, None, "00", "00", "+00:00"),
+ },
+ ),
+ (
+ "01.4567+00:00",
+ {
+ "hh": "01.4567",
+ "mm": None,
+ "ss": None,
+ "tz": TimezoneTuple(False, None, "00", "00", "+00:00"),
+ },
+ ),
+ (
+ "01:23:45+00:00",
+ {
+ "hh": "01",
+ "mm": "23",
+ "ss": "45",
+ "tz": TimezoneTuple(False, None, "00", "00", "+00:00"),
+ },
+ ),
+ (
+ "24:00:00+00:00",
+ {
+ "hh": "24",
+ "mm": "00",
+ "ss": "00",
+ "tz": TimezoneTuple(False, None, "00", "00", "+00:00"),
+ },
+ ),
+ (
+ "23:21:28.512400+00:00",
+ {
+ "hh": "23",
+ "mm": "21",
+ "ss": "28.512400",
+ "tz": TimezoneTuple(False, None, "00", "00", "+00:00"),
+ },
+ ),
+ (
+ "01:23+00:00",
+ {
+ "hh": "01",
+ "mm": "23",
+ "ss": None,
+ "tz": TimezoneTuple(False, None, "00", "00", "+00:00"),
+ },
+ ),
+ (
+ "24:00+00:00",
+ {
+ "hh": "24",
+ "mm": "00",
+ "ss": None,
+ "tz": TimezoneTuple(False, None, "00", "00", "+00:00"),
+ },
+ ),
+ (
+ "01:23.4567+00:00",
+ {
+ "hh": "01",
+ "mm": "23.4567",
+ "ss": None,
+ "tz": TimezoneTuple(False, None, "00", "00", "+00:00"),
+ },
+ ),
+ (
+ "23:21:28.512400+11:15",
+ {
+ "hh": "23",
+ "mm": "21",
+ "ss": "28.512400",
+ "tz": TimezoneTuple(False, None, "11", "15", "+11:15"),
+ },
+ ),
+ (
+ "23:21:28.512400-12:34",
+ {
+ "hh": "23",
+ "mm": "21",
+ "ss": "28.512400",
+ "tz": TimezoneTuple(True, None, "12", "34", "-12:34"),
+ },
+ ),
+ (
+ "23:21:28.512400Z",
+ {
+ "hh": "23",
+ "mm": "21",
+ "ss": "28.512400",
+ "tz": TimezoneTuple(False, True, None, None, "Z"),
+ },
+ ),
+ (
+ "06:14:00.000123Z",
+ {
+ "hh": "06",
+ "mm": "14",
+ "ss": "00.000123",
+ "tz": TimezoneTuple(False, True, None, None, "Z"),
+ },
+ ),
+ )
+
+ for testtuple in testtuples:
+ with mock.patch.object(
+ aniso8601.time.PythonTimeBuilder, "build_time"
+ ) as mockBuildTime:
+
+ mockBuildTime.return_value = testtuple[1]
+
+ result = parse_time(testtuple[0])
+
+ self.assertEqual(result, testtuple[1])
+ mockBuildTime.assert_called_once_with(**testtuple[1])
+
+ def test_parse_time_badtype(self):
+ testtuples = (None, 1, False, 1.234)
+
+ for testtuple in testtuples:
+ with self.assertRaises(ValueError):
+ parse_time(testtuple, builder=None)
+
+ def test_parse_time_badstr(self):
+ testtuples = (
+ "A6:14:00.000123Z",
+ "06:14:0B",
+ "06:1 :02",
+ "0000,70:24,9",
+ "00.27:5332",
+ "bad",
+ "",
+ )
+
+ for testtuple in testtuples:
+ with self.assertRaises(ISOFormatError):
+ parse_time(testtuple, builder=None)
+
+ def test_parse_time_mockbuilder(self):
+ mockBuilder = mock.Mock()
+
+ expectedargs = {"hh": "01", "mm": "23", "ss": "45", "tz": None}
+
+ mockBuilder.build_time.return_value = expectedargs
+
+ result = parse_time("01:23:45", builder=mockBuilder)
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.build_time.assert_called_once_with(**expectedargs)
+
+ mockBuilder = mock.Mock()
+
+ expectedargs = {
+ "hh": "23",
+ "mm": "21",
+ "ss": "28.512400",
+ "tz": TimezoneTuple(False, None, "00", "00", "+00:00"),
+ }
+
+ mockBuilder.build_time.return_value = expectedargs
+
+ result = parse_time("232128.512400+00:00", builder=mockBuilder)
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.build_time.assert_called_once_with(**expectedargs)
+
+ mockBuilder = mock.Mock()
+
+ expectedargs = {
+ "hh": "23",
+ "mm": "21",
+ "ss": "28.512400",
+ "tz": TimezoneTuple(False, None, "11", "15", "+11:15"),
+ }
+
+ mockBuilder.build_time.return_value = expectedargs
+
+ result = parse_time("23:21:28.512400+11:15", builder=mockBuilder)
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.build_time.assert_called_once_with(**expectedargs)
+
+ def test_parse_datetime(self):
+ testtuples = (
+ (
+ "2019-06-05T01:03:11,858714",
+ (
+ DateTuple("2019", "06", "05", None, None, None),
+ TimeTuple("01", "03", "11.858714", None),
+ ),
+ ),
+ (
+ "2019-06-05T01:03:11.858714",
+ (
+ DateTuple("2019", "06", "05", None, None, None),
+ TimeTuple("01", "03", "11.858714", None),
+ ),
+ ),
+ (
+ "1981-04-05T23:21:28.512400Z",
+ (
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple(
+ "23",
+ "21",
+ "28.512400",
+ TimezoneTuple(False, True, None, None, "Z"),
+ ),
+ ),
+ ),
+ (
+ "1981095T23:21:28.512400-12:34",
+ (
+ DateTuple("1981", None, None, None, None, "095"),
+ TimeTuple(
+ "23",
+ "21",
+ "28.512400",
+ TimezoneTuple(True, None, "12", "34", "-12:34"),
+ ),
+ ),
+ ),
+ (
+ "19810405T23:21:28+00",
+ (
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple(
+ "23", "21", "28", TimezoneTuple(False, None, "00", None, "+00")
+ ),
+ ),
+ ),
+ (
+ "19810405T23:21:28+00:00",
+ (
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple(
+ "23",
+ "21",
+ "28",
+ TimezoneTuple(False, None, "00", "00", "+00:00"),
+ ),
+ ),
+ ),
+ )
+
+ for testtuple in testtuples:
+ with mock.patch.object(
+ aniso8601.time.PythonTimeBuilder, "build_datetime"
+ ) as mockBuildDateTime:
+
+ mockBuildDateTime.return_value = testtuple[1]
+
+ result = parse_datetime(testtuple[0])
+
+ self.assertEqual(result, testtuple[1])
+ mockBuildDateTime.assert_called_once_with(*testtuple[1])
+
+ def test_parse_datetime_spacedelimited(self):
+ expectedargs = (
+ DateTuple("2004", None, None, "53", "6", None),
+ TimeTuple(
+ "23", "21", "28.512400", TimezoneTuple(True, None, "12", "34", "-12:34")
+ ),
+ )
+
+ with mock.patch.object(
+ aniso8601.time.PythonTimeBuilder, "build_datetime"
+ ) as mockBuildDateTime:
+
+ mockBuildDateTime.return_value = expectedargs
+
+ result = parse_datetime("2004-W53-6 23:21:28.512400-12:34", delimiter=" ")
+
+ self.assertEqual(result, expectedargs)
+ mockBuildDateTime.assert_called_once_with(*expectedargs)
+
+ def test_parse_datetime_commadelimited(self):
+ expectedargs = (
+ DateTuple("1981", "04", "05", None, None, None),
+ TimeTuple(
+ "23", "21", "28.512400", TimezoneTuple(False, True, None, None, "Z")
+ ),
+ )
+
+ with mock.patch.object(
+ aniso8601.time.PythonTimeBuilder, "build_datetime"
+ ) as mockBuildDateTime:
+
+ mockBuildDateTime.return_value = expectedargs
+
+ result = parse_datetime("1981-04-05,23:21:28,512400Z", delimiter=",")
+
+ self.assertEqual(result, expectedargs)
+ mockBuildDateTime.assert_called_once_with(*expectedargs)
+
+ def test_parse_datetime_baddelimiter(self):
+ testtuples = (
+ "1981-04-05,23:21:28,512400Z",
+ "2004-W53-6 23:21:28.512400-12:3",
+ "1981040523:21:28",
+ )
+
+ for testtuple in testtuples:
+ with self.assertRaises(ISOFormatError):
+ parse_datetime(testtuple, builder=None)
+
+ def test_parse_datetime_badtype(self):
+ testtuples = (None, 1, False, 1.234)
+
+ for testtuple in testtuples:
+ with self.assertRaises(ValueError):
+ parse_datetime(testtuple, builder=None)
+
+ def test_parse_datetime_badstr(self):
+ testtuples = (
+ "1981-04-05TA6:14:00.000123Z",
+ "2004-W53-6T06:14:0B",
+ "2014-01-230T23:21:28+00",
+ "201401230T01:03:11.858714",
+ "9999 W53T49",
+ "9T0000,70:24,9",
+ "bad",
+ "",
+ )
+
+ for testtuple in testtuples:
+ with self.assertRaises(ISOFormatError):
+ parse_datetime(testtuple, builder=None)
+
+ def test_parse_datetime_mockbuilder(self):
+ mockBuilder = mock.Mock()
+
+ expectedargs = (
+ DateTuple("1981", None, None, None, None, "095"),
+ TimeTuple(
+ "23", "21", "28.512400", TimezoneTuple(True, None, "12", "34", "-12:34")
+ ),
+ )
+
+ mockBuilder.build_datetime.return_value = expectedargs
+
+ result = parse_datetime("1981095T23:21:28.512400-12:34", builder=mockBuilder)
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.build_datetime.assert_called_once_with(*expectedargs)
diff --git a/libs/aniso8601/tests/test_timezone.py b/libs/aniso8601/tests/test_timezone.py
new file mode 100644
index 000000000..5df9671f1
--- /dev/null
+++ b/libs/aniso8601/tests/test_timezone.py
@@ -0,0 +1,123 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+import unittest
+
+import aniso8601
+from aniso8601.exceptions import ISOFormatError
+from aniso8601.tests.compat import mock
+from aniso8601.timezone import parse_timezone
+
+
+class TestTimezoneParserFunctions(unittest.TestCase):
+ def test_parse_timezone(self):
+ testtuples = (
+ ("Z", {"negative": False, "Z": True, "name": "Z"}),
+ ("+00:00", {"negative": False, "hh": "00", "mm": "00", "name": "+00:00"}),
+ ("+01:00", {"negative": False, "hh": "01", "mm": "00", "name": "+01:00"}),
+ ("-01:00", {"negative": True, "hh": "01", "mm": "00", "name": "-01:00"}),
+ ("+00:12", {"negative": False, "hh": "00", "mm": "12", "name": "+00:12"}),
+ ("+01:23", {"negative": False, "hh": "01", "mm": "23", "name": "+01:23"}),
+ ("-01:23", {"negative": True, "hh": "01", "mm": "23", "name": "-01:23"}),
+ ("+0000", {"negative": False, "hh": "00", "mm": "00", "name": "+0000"}),
+ ("+0100", {"negative": False, "hh": "01", "mm": "00", "name": "+0100"}),
+ ("-0100", {"negative": True, "hh": "01", "mm": "00", "name": "-0100"}),
+ ("+0012", {"negative": False, "hh": "00", "mm": "12", "name": "+0012"}),
+ ("+0123", {"negative": False, "hh": "01", "mm": "23", "name": "+0123"}),
+ ("-0123", {"negative": True, "hh": "01", "mm": "23", "name": "-0123"}),
+ ("+00", {"negative": False, "hh": "00", "mm": None, "name": "+00"}),
+ ("+01", {"negative": False, "hh": "01", "mm": None, "name": "+01"}),
+ ("-01", {"negative": True, "hh": "01", "mm": None, "name": "-01"}),
+ ("+12", {"negative": False, "hh": "12", "mm": None, "name": "+12"}),
+ ("-12", {"negative": True, "hh": "12", "mm": None, "name": "-12"}),
+ )
+
+ for testtuple in testtuples:
+ with mock.patch.object(
+ aniso8601.timezone.PythonTimeBuilder, "build_timezone"
+ ) as mockBuildTimezone:
+
+ mockBuildTimezone.return_value = testtuple[1]
+
+ result = parse_timezone(testtuple[0])
+
+ self.assertEqual(result, testtuple[1])
+ mockBuildTimezone.assert_called_once_with(**testtuple[1])
+
+ def test_parse_timezone_badtype(self):
+ testtuples = (None, 1, False, 1.234)
+
+ for testtuple in testtuples:
+ with self.assertRaises(ValueError):
+ parse_timezone(testtuple, builder=None)
+
+ def test_parse_timezone_badstr(self):
+ testtuples = (
+ "+1",
+ "-00",
+ "-0000",
+ "-00:00",
+ "01",
+ "0123",
+ "@12:34",
+ "Y",
+ " Z",
+ "Z ",
+ " Z ",
+ "bad",
+ "",
+ )
+
+ for testtuple in testtuples:
+ with self.assertRaises(ISOFormatError):
+ parse_timezone(testtuple, builder=None)
+
+ def test_parse_timezone_mockbuilder(self):
+ mockBuilder = mock.Mock()
+
+ expectedargs = {"negative": False, "Z": True, "name": "Z"}
+
+ mockBuilder.build_timezone.return_value = expectedargs
+
+ result = parse_timezone("Z", builder=mockBuilder)
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.build_timezone.assert_called_once_with(**expectedargs)
+
+ mockBuilder = mock.Mock()
+
+ expectedargs = {"negative": False, "hh": "00", "mm": "00", "name": "+00:00"}
+
+ mockBuilder.build_timezone.return_value = expectedargs
+
+ result = parse_timezone("+00:00", builder=mockBuilder)
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.build_timezone.assert_called_once_with(**expectedargs)
+
+ mockBuilder = mock.Mock()
+
+ expectedargs = {"negative": True, "hh": "01", "mm": "23", "name": "-01:23"}
+
+ mockBuilder.build_timezone.return_value = expectedargs
+
+ result = parse_timezone("-01:23", builder=mockBuilder)
+
+ self.assertEqual(result, expectedargs)
+ mockBuilder.build_timezone.assert_called_once_with(**expectedargs)
+
+ def test_parse_timezone_negativezero(self):
+ # A 0 offset cannot be negative
+ with self.assertRaises(ISOFormatError):
+ parse_timezone("-00:00", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_timezone("-0000", builder=None)
+
+ with self.assertRaises(ISOFormatError):
+ parse_timezone("-00", builder=None)
diff --git a/libs/aniso8601/tests/test_utcoffset.py b/libs/aniso8601/tests/test_utcoffset.py
new file mode 100644
index 000000000..11fa4f7d9
--- /dev/null
+++ b/libs/aniso8601/tests/test_utcoffset.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+import datetime
+import pickle
+import unittest
+
+from aniso8601.utcoffset import UTCOffset
+
+
+class TestUTCOffset(unittest.TestCase):
+ def test_pickle(self):
+ # Make sure timezone objects are pickleable
+ testutcoffset = UTCOffset(name="UTC", minutes=0)
+
+ utcoffsetpickle = pickle.dumps(testutcoffset)
+
+ resultutcoffset = pickle.loads(utcoffsetpickle)
+
+ self.assertEqual(resultutcoffset._name, testutcoffset._name)
+ self.assertEqual(resultutcoffset._utcdelta, testutcoffset._utcdelta)
+
+ def test_repr(self):
+ self.assertEqual(str(UTCOffset(minutes=0)), "+0:00:00 UTC")
+
+ self.assertEqual(str(UTCOffset(minutes=60)), "+1:00:00 UTC")
+
+ self.assertEqual(str(UTCOffset(minutes=-60)), "-1:00:00 UTC")
+
+ self.assertEqual(str(UTCOffset(minutes=12)), "+0:12:00 UTC")
+
+ self.assertEqual(str(UTCOffset(minutes=-12)), "-0:12:00 UTC")
+
+ self.assertEqual(str(UTCOffset(minutes=83)), "+1:23:00 UTC")
+
+ self.assertEqual(str(UTCOffset(minutes=-83)), "-1:23:00 UTC")
+
+ self.assertEqual(str(UTCOffset(minutes=1440)), "+1 day, 0:00:00 UTC")
+
+ self.assertEqual(str(UTCOffset(minutes=-1440)), "-1 day, 0:00:00 UTC")
+
+ self.assertEqual(str(UTCOffset(minutes=2967)), "+2 days, 1:27:00 UTC")
+
+ self.assertEqual(str(UTCOffset(minutes=-2967)), "-2 days, 1:27:00 UTC")
+
+ def test_dst(self):
+ tzinfoobject = UTCOffset(minutes=240)
+ # This would raise ISOFormatError or a TypeError if dst info is invalid
+ result = datetime.datetime.now(tzinfoobject)
+ # Hacky way to make sure the tzinfo is what we'd expect
+ self.assertEqual(result.tzinfo.utcoffset(None), datetime.timedelta(hours=4))
diff --git a/libs/aniso8601/time.py b/libs/aniso8601/time.py
new file mode 100644
index 000000000..31fab048e
--- /dev/null
+++ b/libs/aniso8601/time.py
@@ -0,0 +1,203 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+from aniso8601.builders import TupleBuilder
+from aniso8601.builders.python import PythonTimeBuilder
+from aniso8601.compat import is_string
+from aniso8601.date import parse_date
+from aniso8601.decimalfraction import normalize
+from aniso8601.exceptions import ISOFormatError
+from aniso8601.resolution import TimeResolution
+from aniso8601.timezone import parse_timezone
+
+TIMEZONE_DELIMITERS = ["Z", "+", "-"]
+
+
def get_time_resolution(isotimestr):
    """Return the TimeResolution of an ISO 8601 time string.

    Accepts every format parse_time accepts: hh, hh:mm, hhmm, hh:mm:ss,
    hhmmss, each optionally suffixed with Z or a UTC offset
    (±hh, ±hhmm, ±hh:mm).
    """
    parsed = parse_time(isotimestr, builder=TupleBuilder)

    return _get_time_resolution(parsed)
+
+
def get_datetime_resolution(isodatetimestr, delimiter="T"):
    """Return the TimeResolution of a combined <date><delimiter><time> string.

    The time component of a combined string cannot be omitted, so the
    resolution is always that of the time part.
    """
    parsed = parse_datetime(isodatetimestr, delimiter=delimiter, builder=TupleBuilder)

    return _get_time_resolution(parsed.time)
+
+
def _get_time_resolution(isotimetuple):
    # The finest populated component of the parsed tuple wins.
    for component, resolution in (
        (isotimetuple.ss, TimeResolution.Seconds),
        (isotimetuple.mm, TimeResolution.Minutes),
    ):
        if component is not None:
            return resolution

    return TimeResolution.Hours
+
+
def parse_time(isotimestr, builder=PythonTimeBuilder):
    # Given a string in any ISO 8601 time format, return a datetime.time object
    # that corresponds to the given time. Fixed offset tzdata will be included
    # if UTC offset is given in the input string. Valid time formats are:
    #
    # hh:mm:ss
    # hhmmss
    # hh:mm
    # hhmm
    # hh
    # hh:mm:ssZ
    # hhmmssZ
    # hh:mmZ
    # hhmmZ
    # hhZ
    # hh:mm:ss±hh:mm
    # hhmmss±hh:mm
    # hh:mm±hh:mm
    # hhmm±hh:mm
    # hh±hh:mm
    # hh:mm:ss±hhmm
    # hhmmss±hhmm
    # hh:mm±hhmm
    # hhmm±hhmm
    # hh±hhmm
    # hh:mm:ss±hh
    # hhmmss±hh
    # hh:mm±hh
    # hhmm±hh
    # hh±hh
    #
    # Raises ValueError for non-string input and ISOFormatError for any
    # string not matching one of the formats above.
    if is_string(isotimestr) is False:
        raise ValueError("Time must be string.")

    if len(isotimestr) == 0:
        raise ISOFormatError('"{0}" is not a valid ISO 8601 time.'.format(isotimestr))

    # Normalize the decimal-fraction separator before any further parsing
    # (see aniso8601.decimalfraction.normalize).
    timestr = normalize(isotimestr)

    hourstr = None
    minutestr = None
    secondstr = None
    tzstr = None

    fractionalstr = None

    # Split out the timezone: everything from the first "Z", "+", or "-"
    # onward is handed to parse_timezone below.
    for delimiter in TIMEZONE_DELIMITERS:
        delimiteridx = timestr.find(delimiter)

        if delimiteridx != -1:
            tzstr = timestr[delimiteridx:]
            timestr = timestr[0:delimiteridx]

    # Split out the fractional component; it is reattached to the smallest
    # parsed component further down.
    if timestr.find(".") != -1:
        timestr, fractionalstr = timestr.split(".", 1)

        if fractionalstr.isdigit() is False:
            raise ISOFormatError(
                '"{0}" is not a valid ISO 8601 time.'.format(isotimestr)
            )

    # Dispatch on remaining length: 4/5 chars is hhmm or hh:mm, 6/8 chars
    # is hhmmss or hh:mm:ss (without / with separating colons).
    if len(timestr) == 2:
        # hh
        hourstr = timestr
    elif len(timestr) == 4 or len(timestr) == 5:
        # hh:mm
        # hhmm
        if timestr.count(":") == 1:
            hourstr, minutestr = timestr.split(":")
        else:
            hourstr = timestr[0:2]
            minutestr = timestr[2:]
    elif len(timestr) == 6 or len(timestr) == 8:
        # hh:mm:ss
        # hhmmss
        if timestr.count(":") == 2:
            hourstr, minutestr, secondstr = timestr.split(":")
        else:
            hourstr = timestr[0:2]
            minutestr = timestr[2:4]
            secondstr = timestr[4:]
    else:
        raise ISOFormatError('"{0}" is not a valid ISO 8601 time.'.format(isotimestr))

    # Reject any component containing non-digits (e.g. "1a").
    for componentstr in [hourstr, minutestr, secondstr]:
        if componentstr is not None and componentstr.isdigit() is False:
            raise ISOFormatError(
                '"{0}" is not a valid ISO 8601 time.'.format(isotimestr)
            )

    # Reattach the fraction to the smallest component that was parsed.
    if fractionalstr is not None:
        if secondstr is not None:
            secondstr = secondstr + "." + fractionalstr
        elif minutestr is not None:
            minutestr = minutestr + "." + fractionalstr
        else:
            hourstr = hourstr + "." + fractionalstr

    if tzstr is None:
        tz = None
    else:
        # Parse the offset to a tuple; the outer builder decides final types.
        tz = parse_timezone(tzstr, builder=TupleBuilder)

    return builder.build_time(hh=hourstr, mm=minutestr, ss=secondstr, tz=tz)
+
+
def parse_datetime(isodatetimestr, delimiter="T", builder=PythonTimeBuilder):
    """Parse an ISO 8601 combined date/time string: <date><delimiter><time>.

    The ISO 8601 "T" delimiter is used by default. Fixed-offset tzdata is
    attached when the time portion carries a UTC offset. Raises ValueError
    for non-string input and ISOFormatError when the delimiter is absent.
    """
    if is_string(isodatetimestr) is False:
        raise ValueError("Date time must be string.")

    if delimiter not in isodatetimestr:
        raise ISOFormatError(
            'Delimiter "{0}" is not in combined date time '
            'string "{1}".'.format(delimiter, isodatetimestr)
        )

    # Split on the first delimiter only; later occurrences belong to the
    # time/offset portion.
    datestr, timestr = isodatetimestr.split(delimiter, 1)

    return builder.build_datetime(
        parse_date(datestr, builder=TupleBuilder),
        parse_time(timestr, builder=TupleBuilder),
    )
diff --git a/libs/aniso8601/timezone.py b/libs/aniso8601/timezone.py
new file mode 100644
index 000000000..3b66105bd
--- /dev/null
+++ b/libs/aniso8601/timezone.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+from aniso8601.builders.python import PythonTimeBuilder
+from aniso8601.compat import is_string
+from aniso8601.exceptions import ISOFormatError
+
+
def parse_timezone(tzstr, builder=PythonTimeBuilder):
    """Parse an ISO 8601 time-offset string: Z, ±hh:mm, ±hhmm, or ±hh."""
    if is_string(tzstr) is False:
        raise ValueError("Time zone must be string.")

    if len(tzstr) == 1 and tzstr[0] == "Z":
        return builder.build_timezone(negative=False, Z=True, name=tzstr)

    # Slice out the hour and minute digits according to overall length.
    if len(tzstr) == 6:
        # ±hh:mm
        hourstr, minutestr = tzstr[1:3], tzstr[4:6]
    elif len(tzstr) == 5:
        # ±hhmm
        hourstr, minutestr = tzstr[1:3], tzstr[3:5]
    elif len(tzstr) == 3:
        # ±hh
        hourstr, minutestr = tzstr[1:3], None
    else:
        raise ISOFormatError('"{0}" is not a valid ISO 8601 time offset.'.format(tzstr))

    # ISO 8601 forbids a negative zero offset ("-00", "-0000", "-00:00").
    if tzstr[0] == "-" and hourstr == "00" and minutestr in (None, "00"):
        raise ISOFormatError("Negative ISO 8601 time offset must not be 0.")

    # Reject components containing non-digits.
    for componentstr in (hourstr, minutestr):
        if componentstr is not None and componentstr.isdigit() is False:
            raise ISOFormatError(
                '"{0}" is not a valid ISO 8601 time offset.'.format(tzstr)
            )

    if tzstr[0] == "+":
        return builder.build_timezone(
            negative=False, hh=hourstr, mm=minutestr, name=tzstr
        )

    if tzstr[0] == "-":
        return builder.build_timezone(
            negative=True, hh=hourstr, mm=minutestr, name=tzstr
        )

    raise ISOFormatError('"{0}" is not a valid ISO 8601 time offset.'.format(tzstr))
diff --git a/libs/aniso8601/utcoffset.py b/libs/aniso8601/utcoffset.py
new file mode 100644
index 000000000..11872a01e
--- /dev/null
+++ b/libs/aniso8601/utcoffset.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Brandon Nielsen
+# All rights reserved.
+#
+# This software may be modified and distributed under the terms
+# of the BSD license. See the LICENSE file for details.
+
+import datetime
+
+
class UTCOffset(datetime.tzinfo):
    """A fixed-offset :class:`datetime.tzinfo` implementation.

    :param name: value returned by :meth:`tzname`; may be None.
    :param minutes: offset from UTC in minutes; None leaves the offset unset.
    """

    def __init__(self, name=None, minutes=None):
        # tzinfo subclasses must support construction with no arguments
        # (e.g. while unpickling), so both parameters default to None.
        self._name = name

        if minutes is not None:
            self._utcdelta = datetime.timedelta(minutes=minutes)
        else:
            self._utcdelta = None

    def __repr__(self):
        if self._utcdelta is None:
            # Previously this raised a TypeError (None compared against a
            # timedelta); render something debuggable instead.
            return "UTCOffset(name={0!r})".format(self._name)

        if self._utcdelta >= datetime.timedelta(hours=0):
            return "+{0} UTC".format(self._utcdelta)

        # str() of a negative timedelta is normalized to a negative day
        # count plus a positive time of day ("-1 day, 23:59:00" for minus
        # sixty seconds), which is unreadable. Format the magnitude and
        # prepend the sign instead. Unlike the previous manual day/second
        # arithmetic, negating the timedelta also preserves any microsecond
        # component and cannot overstate the seconds by one when
        # microseconds are present.
        return "-{0} UTC".format(-self._utcdelta)

    def utcoffset(self, dt):
        # Fixed offset: independent of *dt*.
        return self._utcdelta

    def tzname(self, dt):
        return self._name

    def dst(self, dt):
        # ISO 8601 specifies offsets should be different if DST is required,
        # instead of allowing for a DST to be specified
        # https://docs.python.org/3/library/datetime.html#datetime.tzinfo.dst
        return datetime.timedelta(0)
diff --git a/libs/attr/__init__.py b/libs/attr/__init__.py
new file mode 100644
index 000000000..386305d62
--- /dev/null
+++ b/libs/attr/__init__.py
@@ -0,0 +1,79 @@
+# SPDX-License-Identifier: MIT
+
+
+import sys
+
+from functools import partial
+
+from . import converters, exceptions, filters, setters, validators
+from ._cmp import cmp_using
+from ._config import get_run_validators, set_run_validators
+from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
+from ._make import (
+ NOTHING,
+ Attribute,
+ Factory,
+ attrib,
+ attrs,
+ fields,
+ fields_dict,
+ make_class,
+ validate,
+)
+from ._version_info import VersionInfo
+
+
+__version__ = "22.1.0"
+__version_info__ = VersionInfo._from_version_string(__version__)
+
+__title__ = "attrs"
+__description__ = "Classes Without Boilerplate"
+__url__ = "https://www.attrs.org/"
+__uri__ = __url__
+__doc__ = __description__ + " <" + __uri__ + ">"
+
+__author__ = "Hynek Schlawack"
+__email__ = "[email protected]"
+
+__license__ = "MIT"
+__copyright__ = "Copyright (c) 2015 Hynek Schlawack"
+
+
# Short historical aliases: ``attr.s`` / ``attr.ib`` spell ``attrs`` /
# ``attrib``.
s = attributes = attrs
ib = attr = attrib
dataclass = partial(attrs, auto_attribs=True)  # happy Easter ;)

# The package's explicit public API.
__all__ = [
    "Attribute",
    "Factory",
    "NOTHING",
    "asdict",
    "assoc",
    "astuple",
    "attr",
    "attrib",
    "attributes",
    "attrs",
    "cmp_using",
    "converters",
    "evolve",
    "exceptions",
    "fields",
    "fields_dict",
    "filters",
    "get_run_validators",
    "has",
    "ib",
    "make_class",
    "resolve_types",
    "s",
    "set_run_validators",
    "setters",
    "validate",
    "validators",
]

if sys.version_info[:2] >= (3, 6):
    # The next-generation API is only exposed on Python 3.6+.
    from ._next_gen import define, field, frozen, mutable  # noqa: F401

    __all__.extend(("define", "field", "frozen", "mutable"))
diff --git a/libs/attr/__init__.pyi b/libs/attr/__init__.pyi
new file mode 100644
index 000000000..03cc4c82d
--- /dev/null
+++ b/libs/attr/__init__.pyi
@@ -0,0 +1,486 @@
+import sys
+
+from typing import (
+ Any,
+ Callable,
+ ClassVar,
+ Dict,
+ Generic,
+ List,
+ Mapping,
+ Optional,
+ Protocol,
+ Sequence,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ overload,
+)
+
+# `import X as X` is required to make these public
+from . import converters as converters
+from . import exceptions as exceptions
+from . import filters as filters
+from . import setters as setters
+from . import validators as validators
+from ._cmp import cmp_using as cmp_using
+from ._version_info import VersionInfo
+
+__version__: str
+__version_info__: VersionInfo
+__title__: str
+__description__: str
+__url__: str
+__uri__: str
+__author__: str
+__email__: str
+__license__: str
+__copyright__: str
+
_T = TypeVar("_T")
_C = TypeVar("_C", bound=type)

# ``eq``/``order``/``cmp`` arguments accept either a bool or a key callable.
_EqOrderType = Union[bool, Callable[[Any], Any]]
# Validators are called as (instance, attribute, value).
_ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
_ConverterType = Callable[[Any], Any]
# asdict/astuple filters are called as (attribute, value).
_FilterType = Callable[[Attribute[_T], _T], bool]
_ReprType = Callable[[Any], str]
_ReprArgType = Union[bool, _ReprType]
# Same (Any, Attribute, Any) argument shape as validators.
_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any]
_OnSetAttrArgType = Union[
    _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType
]
# field_transformer receives (cls, field list) and returns the new list.
_FieldTransformer = Callable[
    [type, List[Attribute[Any]]], List[Attribute[Any]]
]
# FIXME: in reality, if multiple validators are passed they must be in a list
# or tuple, but those are invariant and so would prevent subtypes of
# _ValidatorType from working when passed in a list or tuple.
_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]

# A protocol to be able to statically accept an attrs class.
class AttrsInstance(Protocol):
    __attrs_attrs__: ClassVar[Any]
+
+# _make --
+
+NOTHING: object
+
+# NOTE: Factory lies about its return type to make this possible:
+# `x: List[int] # = Factory(list)`
+# Work around mypy issue #4554 in the common case by using an overload.
+if sys.version_info >= (3, 8):
+ from typing import Literal
+ @overload
+ def Factory(factory: Callable[[], _T]) -> _T: ...
+ @overload
+ def Factory(
+ factory: Callable[[Any], _T],
+ takes_self: Literal[True],
+ ) -> _T: ...
+ @overload
+ def Factory(
+ factory: Callable[[], _T],
+ takes_self: Literal[False],
+ ) -> _T: ...
+
+else:
+ @overload
+ def Factory(factory: Callable[[], _T]) -> _T: ...
+ @overload
+ def Factory(
+ factory: Union[Callable[[Any], _T], Callable[[], _T]],
+ takes_self: bool = ...,
+ ) -> _T: ...
+
+# Static type inference support via __dataclass_transform__ implemented as per:
+# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md
+# This annotation must be applied to all overloads of "define" and "attrs"
+#
+# NOTE: This is a typing construct and does not exist at runtime. Extensions
+# wrapping attrs decorators should declare a separate __dataclass_transform__
+# signature in the extension module using the specification linked above to
+# provide pyright support.
+def __dataclass_transform__(
+ *,
+ eq_default: bool = True,
+ order_default: bool = False,
+ kw_only_default: bool = False,
+ field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
+) -> Callable[[_T], _T]: ...
+
# Metadata describing a single attrs-defined attribute (see ``fields`` /
# ``fields_dict`` below, which return these).
class Attribute(Generic[_T]):
    name: str
    default: Optional[_T]
    validator: Optional[_ValidatorType[_T]]
    repr: _ReprArgType
    cmp: _EqOrderType
    eq: _EqOrderType
    order: _EqOrderType
    hash: Optional[bool]
    init: bool
    converter: Optional[_ConverterType]
    metadata: Dict[Any, Any]
    type: Optional[Type[_T]]
    kw_only: bool
    on_setattr: _OnSetAttrType
    def evolve(self, **changes: Any) -> "Attribute[Any]": ...
+
+# NOTE: We had several choices for the annotation to use for type arg:
+# 1) Type[_T]
+# - Pros: Handles simple cases correctly
+# - Cons: Might produce less informative errors in the case of conflicting
+# TypeVars e.g. `attr.ib(default='bad', type=int)`
+# 2) Callable[..., _T]
+# - Pros: Better error messages than #1 for conflicting TypeVars
+# - Cons: Terrible error messages for validator checks.
+# e.g. attr.ib(type=int, validator=validate_str)
+# -> error: Cannot infer function type argument
+# 3) type (and do all of the work in the mypy plugin)
+# - Pros: Simple here, and we could customize the plugin with our own errors.
+# - Cons: Would need to write mypy plugin code to handle all the cases.
+# We chose option #1.
+
+# `attr` lies about its return type to make the following possible:
+# attr() -> Any
+# attr(8) -> int
+# attr(validator=<some callable>) -> Whatever the callable expects.
+# This makes this type of assignments possible:
+# x: int = attr(8)
+#
+# This form catches explicit None or no default but with no other arguments
+# returns Any.
+@overload
+def attrib(
+ default: None = ...,
+ validator: None = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: None = ...,
+ converter: None = ...,
+ factory: None = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def attrib(
+ default: None = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: Optional[Type[_T]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def attrib(
+ default: _T,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: Optional[Type[_T]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def attrib(
+ default: Optional[_T] = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: object = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+@overload
+def field(
+ *,
+ default: None = ...,
+ validator: None = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: None = ...,
+ factory: None = ...,
+ kw_only: bool = ...,
+ eq: Optional[bool] = ...,
+ order: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def field(
+ *,
+ default: None = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def field(
+ *,
+ default: _T,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def field(
+ *,
+ default: Optional[_T] = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+@overload
+@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
+def attrs(
+ maybe_cls: _C,
+ these: Optional[Dict[str, Any]] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ auto_detect: bool = ...,
+ collect_by_mro: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+ match_args: bool = ...,
+) -> _C: ...
+@overload
+@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
+def attrs(
+ maybe_cls: None = ...,
+ these: Optional[Dict[str, Any]] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ auto_detect: bool = ...,
+ collect_by_mro: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+ match_args: bool = ...,
+) -> Callable[[_C], _C]: ...
+@overload
+@__dataclass_transform__(field_descriptors=(attrib, field))
+def define(
+ maybe_cls: _C,
+ *,
+ these: Optional[Dict[str, Any]] = ...,
+ repr: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[bool] = ...,
+ order: Optional[bool] = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+ match_args: bool = ...,
+) -> _C: ...
+@overload
+@__dataclass_transform__(field_descriptors=(attrib, field))
+def define(
+ maybe_cls: None = ...,
+ *,
+ these: Optional[Dict[str, Any]] = ...,
+ repr: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[bool] = ...,
+ order: Optional[bool] = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+ match_args: bool = ...,
+) -> Callable[[_C], _C]: ...
+
mutable = define
frozen = define  # they differ only in their defaults

# Introspection helpers; ``fields`` returns Any pending proper mypy-plugin
# support (see the TODO below for make_class).
def fields(cls: Type[AttrsInstance]) -> Any: ...
def fields_dict(cls: Type[AttrsInstance]) -> Dict[str, Attribute[Any]]: ...
def validate(inst: AttrsInstance) -> None: ...
# NOTE(review): presumably resolves string/forward-reference annotations on
# *cls* — confirm against the _funcs implementation.
def resolve_types(
    cls: _C,
    globalns: Optional[Dict[str, Any]] = ...,
    localns: Optional[Dict[str, Any]] = ...,
    attribs: Optional[List[Attribute[Any]]] = ...,
) -> _C: ...
+
+# TODO: add support for returning a proper attrs class from the mypy plugin
+# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
+# [attr.ib()])` is valid
+def make_class(
+ name: str,
+ attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
+ bases: Tuple[type, ...] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ collect_by_mro: bool = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+) -> type: ...
+
+# _funcs --
+
+# TODO: add support for returning TypedDict from the mypy plugin
+# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
+# these:
+# https://github.com/python/mypy/issues/4236
+# https://github.com/python/typing/issues/253
+# XXX: remember to fix attrs.asdict/astuple too!
def asdict(
    inst: AttrsInstance,
    recurse: bool = ...,
    filter: Optional[_FilterType[Any]] = ...,
    dict_factory: Type[Mapping[Any, Any]] = ...,
    retain_collection_types: bool = ...,
    value_serializer: Optional[
        Callable[[type, Attribute[Any], Any], Any]
    ] = ...,
    tuple_keys: Optional[bool] = ...,
) -> Dict[str, Any]: ...

# TODO: add support for returning NamedTuple from the mypy plugin
def astuple(
    inst: AttrsInstance,
    recurse: bool = ...,
    filter: Optional[_FilterType[Any]] = ...,
    tuple_factory: Type[Sequence[Any]] = ...,
    retain_collection_types: bool = ...,
) -> Tuple[Any, ...]: ...
def has(cls: type) -> bool: ...
def assoc(inst: _T, **changes: Any) -> _T: ...
def evolve(inst: _T, **changes: Any) -> _T: ...

# _config --

def set_run_validators(run: bool) -> None: ...
def get_run_validators() -> bool: ...

# aliases --

s = attributes = attrs
ib = attr = attrib
# NOTE: at runtime ``dataclass`` is partial(attrs, auto_attribs=True); the
# plain ``attrs`` here keeps the stub simple.
dataclass = attrs  # Technically, partial(attrs, auto_attribs=True) ;)
diff --git a/libs/attr/_cmp.py b/libs/attr/_cmp.py
new file mode 100644
index 000000000..81b99e4c3
--- /dev/null
+++ b/libs/attr/_cmp.py
@@ -0,0 +1,155 @@
+# SPDX-License-Identifier: MIT
+
+
+import functools
+import types
+
+from ._make import _make_ne
+
+
+_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
+
+
def cmp_using(
    eq=None,
    lt=None,
    le=None,
    gt=None,
    ge=None,
    require_same_type=True,
    class_name="Comparable",
):
    """
    Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and
    ``cmp`` arguments to customize field comparison.

    The resulting class will have a full set of ordering methods if
    at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided.

    :param Optional[callable] eq: `callable` used to evaluate equality
        of two objects.
    :param Optional[callable] lt: `callable` used to evaluate whether
        one object is less than another object.
    :param Optional[callable] le: `callable` used to evaluate whether
        one object is less than or equal to another object.
    :param Optional[callable] gt: `callable` used to evaluate whether
        one object is greater than another object.
    :param Optional[callable] ge: `callable` used to evaluate whether
        one object is greater than or equal to another object.

    :param bool require_same_type: When `True`, equality and ordering methods
        will return `NotImplemented` if objects are not of the same type.

    :param Optional[str] class_name: Name of class. Defaults to 'Comparable'.

    See `comparison` for more details.

    .. versionadded:: 21.1.0
    """

    # The generated class wraps a single compared value in ``.value``.
    body = {
        "__slots__": ["value"],
        "__init__": _make_init(),
        "_requirements": [],
        "_is_comparable_to": _is_comparable_to,
    }

    # Add operations.
    num_order_functions = 0
    has_eq_function = False

    if eq is not None:
        has_eq_function = True
        body["__eq__"] = _make_operator("eq", eq)
        body["__ne__"] = _make_ne()

    if lt is not None:
        num_order_functions += 1
        body["__lt__"] = _make_operator("lt", lt)

    if le is not None:
        num_order_functions += 1
        body["__le__"] = _make_operator("le", le)

    if gt is not None:
        num_order_functions += 1
        body["__gt__"] = _make_operator("gt", gt)

    if ge is not None:
        num_order_functions += 1
        body["__ge__"] = _make_operator("ge", ge)

    type_ = types.new_class(
        class_name, (object,), {}, lambda ns: ns.update(body)
    )

    # Add same type requirement.
    if require_same_type:
        type_._requirements.append(_check_same_type)

    # Add total ordering if at least one operation was defined.
    if 0 < num_order_functions < 4:
        if not has_eq_function:
            # functools.total_ordering requires __eq__ to be defined,
            # so raise early error here to keep a nice stack.
            # (Message typo fixed: was "eq must be define is order".)
            raise ValueError(
                "eq must be defined in order to complete ordering from "
                "lt, le, gt, ge."
            )
        type_ = functools.total_ordering(type_)

    return type_
+
+
def _make_init():
    """
    Build the ``__init__`` used by classes generated in `cmp_using`.
    """

    def __init__(self, value):
        """
        Store *value* on the instance for later comparison.
        """
        self.value = value

    return __init__
+
+
def _make_operator(name, func):
    """
    Wrap *func* into a rich-comparison method named ``__<name>__``.
    """

    def method(self, other):
        # Bail out when the objects should not be compared at all.
        if not self._is_comparable_to(other):
            return NotImplemented

        result = func(self.value, other.value)
        if result is NotImplemented:
            return NotImplemented
        return result

    method.__name__ = "__{0}__".format(name)
    method.__doc__ = "Return a {0} b. Computed by attrs.".format(
        _operation_names[name]
    )

    return method
+
+
def _is_comparable_to(self, other):
    """
    Return True when every registered requirement accepts *other*.
    """
    return all(func(self, other) for func in self._requirements)
+
+
def _check_same_type(self, other):
    """
    Return True if *self* and *other* wrap values of the same class.
    """
    # Compare __class__ attributes (not type()) so proxy objects that lie
    # about their class keep behaving as before.
    return self.value.__class__ is other.value.__class__
diff --git a/libs/attr/_cmp.pyi b/libs/attr/_cmp.pyi
new file mode 100644
index 000000000..35437eff6
--- /dev/null
+++ b/libs/attr/_cmp.pyi
@@ -0,0 +1,13 @@
+from typing import Any, Callable, Optional, Type
+
+_CompareWithType = Callable[[Any, Any], bool]
+
# Every parameter of the runtime attr._cmp.cmp_using has a default, so the
# stub must declare defaults too; without them type checkers reject valid
# calls such as ``cmp_using(eq=...)`` or ``cmp_using(lt=...)``.
def cmp_using(
    eq: Optional[_CompareWithType] = ...,
    lt: Optional[_CompareWithType] = ...,
    le: Optional[_CompareWithType] = ...,
    gt: Optional[_CompareWithType] = ...,
    ge: Optional[_CompareWithType] = ...,
    require_same_type: bool = ...,
    class_name: str = ...,
) -> Type: ...
diff --git a/libs/attr/_compat.py b/libs/attr/_compat.py
new file mode 100644
index 000000000..582649325
--- /dev/null
+++ b/libs/attr/_compat.py
@@ -0,0 +1,185 @@
+# SPDX-License-Identifier: MIT
+
+
+import inspect
+import platform
+import sys
+import threading
+import types
+import warnings
+
+from collections.abc import Mapping, Sequence # noqa
+
+
# Feature flags for the running interpreter.
PYPY = platform.python_implementation() == "PyPy"
PY36 = sys.version_info[:2] >= (3, 6)
HAS_F_STRINGS = PY36  # f-strings arrived in (C)Python 3.6.
PY310 = sys.version_info[:2] >= (3, 10)


# Plain dicts keep insertion order on PyPy and on CPython >= 3.6; fall back
# to OrderedDict on older interpreters.
if PYPY or PY36:
    ordered_dict = dict
else:
    from collections import OrderedDict

    ordered_dict = OrderedDict
+
+
def just_warn(*args, **kw):
    """
    Fallback for `set_closure_cell`: emit a RuntimeWarning instead of
    patching the cell.  Accepts and ignores any arguments so it is
    call-compatible with the real implementation.
    """
    warnings.warn(
        "Running interpreter doesn't sufficiently support code object "
        "introspection. Some features like bare super() or accessing "
        "__class__ will not work with slotted classes.",
        RuntimeWarning,
        stacklevel=2,
    )
+
+
+class _AnnotationExtractor:
+ """
+ Extract type annotations from a callable, returning None whenever there
+ is none.
+ """
+
+ __slots__ = ["sig"]
+
+ def __init__(self, callable):
+ try:
+ self.sig = inspect.signature(callable)
+ except (ValueError, TypeError): # inspect failed
+ self.sig = None
+
+ def get_first_param_type(self):
+ """
+ Return the type annotation of the first argument if it's not empty.
+ """
+ if not self.sig:
+ return None
+
+ params = list(self.sig.parameters.values())
+ if params and params[0].annotation is not inspect.Parameter.empty:
+ return params[0].annotation
+
+ return None
+
+ def get_return_type(self):
+ """
+ Return the return type if it's not empty.
+ """
+ if (
+ self.sig
+ and self.sig.return_annotation is not inspect.Signature.empty
+ ):
+ return self.sig.return_annotation
+
+ return None
+
+
def make_set_closure_cell():
    """Return a function of two arguments (cell, value) which sets
    the value stored in the closure cell `cell` to `value`.

    Returns `just_warn` instead when the interpreter's code objects can't
    be manipulated the way the implementations below require.
    """
    # pypy makes this easy. (It also supports the logic below, but
    # why not do the easy/fast thing?)
    if PYPY:

        def set_closure_cell(cell, value):
            cell.__setstate__((value,))

        return set_closure_cell

    # Otherwise gotta do it the hard way.

    # Create a function that will set its first cellvar to `value`.
    def set_first_cellvar_to(value):
        x = value
        return

    # This function will be eliminated as dead code, but
    # not before its reference to `x` forces `x` to be
    # represented as a closure cell rather than a local.
    def force_x_to_be_a_cell():  # pragma: no cover
        return x

    try:
        # Extract the code object and make sure our assumptions about
        # the closure behavior are correct.
        co = set_first_cellvar_to.__code__
        if co.co_cellvars != ("x",) or co.co_freevars != ():
            raise AssertionError  # pragma: no cover

        # Convert this code object to a code object that sets the
        # function's first _freevar_ (not cellvar) to the argument.
        if sys.version_info >= (3, 8):
            # On 3.8+, cell contents are directly writable; no bytecode
            # surgery is necessary.

            def set_closure_cell(cell, value):
                cell.cell_contents = value

        else:
            # Rebuild the code object passing co_cellvars where CodeType
            # expects co_freevars (and vice versa), which turns `x` into a
            # free variable that can be bound to an arbitrary cell below.
            args = [co.co_argcount]
            args.append(co.co_kwonlyargcount)
            args.extend(
                [
                    co.co_nlocals,
                    co.co_stacksize,
                    co.co_flags,
                    co.co_code,
                    co.co_consts,
                    co.co_names,
                    co.co_varnames,
                    co.co_filename,
                    co.co_name,
                    co.co_firstlineno,
                    co.co_lnotab,
                    # These two arguments are reversed:
                    co.co_cellvars,
                    co.co_freevars,
                ]
            )
            set_first_freevar_code = types.CodeType(*args)

            def set_closure_cell(cell, value):
                # Create a function using the set_first_freevar_code,
                # whose first closure cell is `cell`. Calling it will
                # change the value of that cell.
                setter = types.FunctionType(
                    set_first_freevar_code, {}, "setter", (), (cell,)
                )
                # And call it to set the cell.
                setter(value)

        # Make sure it works on this interpreter:
        def make_func_with_cell():
            x = None

            def func():
                return x  # pragma: no cover

            return func

        cell = make_func_with_cell().__closure__[0]
        set_closure_cell(cell, 100)
        if cell.cell_contents != 100:
            raise AssertionError  # pragma: no cover

    except Exception:
        # Anything unexpected went wrong: degrade gracefully to the
        # warning-only fallback rather than failing at import time.
        return just_warn
    else:
        return set_closure_cell
+
+
# Resolve the best available implementation for this interpreter once, at
# import time.
set_closure_cell = make_set_closure_cell()

# Thread-local global to track attrs instances which are already being repr'd.
# This is needed because there is no other (thread-safe) way to pass info
# about the instances that are already being repr'd through the call stack
# in order to ensure we don't perform infinite recursion.
#
# For instance, if an instance contains a dict which contains that instance,
# we need to know that we're already repr'ing the outside instance from within
# the dict's repr() call.
#
# This lives here rather than in _make.py so that the functions in _make.py
# don't have a direct reference to the thread-local in their globals dict.
# If they have such a reference, it breaks cloudpickle.
repr_context = threading.local()
diff --git a/libs/attr/_config.py b/libs/attr/_config.py
new file mode 100644
index 000000000..96d420077
--- /dev/null
+++ b/libs/attr/_config.py
@@ -0,0 +1,31 @@
+# SPDX-License-Identifier: MIT
+
+
__all__ = ["set_run_validators", "get_run_validators"]

# Module-level flag read via get_run_validators() and written via
# set_run_validators(); validators run by default.
_run_validators = True
+
+
def set_run_validators(run):
    """
    Globally turn running of validators on or off.  They are on by default.

    :param bool run: Whether validators should be executed.
    :raises TypeError: If *run* is not a `bool`.

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
        moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
        instead.
    """
    if not isinstance(run, bool):
        raise TypeError("'run' must be bool.")

    global _run_validators
    _run_validators = run
+
+
def get_run_validators():
    """
    Return whether or not validators are run.

    :rtype: bool

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
        moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
        instead.
    """
    return _run_validators
diff --git a/libs/attr/_funcs.py b/libs/attr/_funcs.py
new file mode 100644
index 000000000..a982d7cb5
--- /dev/null
+++ b/libs/attr/_funcs.py
@@ -0,0 +1,420 @@
+# SPDX-License-Identifier: MIT
+
+
+import copy
+
+from ._make import NOTHING, _obj_setattr, fields
+from .exceptions import AttrsAttributeNotFoundError
+
+
def asdict(
    inst,
    recurse=True,
    filter=None,
    dict_factory=dict,
    retain_collection_types=False,
    value_serializer=None,
):
    """
    Return the ``attrs`` attribute values of *inst* as a dict.

    Optionally recurse into other ``attrs``-decorated classes.

    :param inst: Instance of an ``attrs``-decorated class.
    :param bool recurse: Recurse into classes that are also
        ``attrs``-decorated.
    :param callable filter: A callable whose return code determines whether an
        attribute or element is included (``True``) or dropped (``False``). Is
        called with the `attrs.Attribute` as the first argument and the
        value as the second argument.
    :param callable dict_factory: A callable to produce dictionaries from. For
        example, to produce ordered dictionaries instead of normal Python
        dictionaries, pass in ``collections.OrderedDict``.
    :param bool retain_collection_types: Do not convert to ``list`` when
        encountering an attribute whose type is ``tuple`` or ``set``. Only
        meaningful if ``recurse`` is ``True``.
    :param Optional[callable] value_serializer: A hook that is called for every
        attribute or dict key/value. It receives the current instance, field
        and value and must return the (updated) value. The hook is run *after*
        the optional *filter* has been applied.

    :rtype: return type of *dict_factory*

    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    .. versionadded:: 16.0.0 *dict_factory*
    .. versionadded:: 16.1.0 *retain_collection_types*
    .. versionadded:: 20.3.0 *value_serializer*
    .. versionadded:: 21.3.0 If a dict has a collection for a key, it is
        serialized as a tuple.
    """
    attrs = fields(inst.__class__)
    rv = dict_factory()
    for a in attrs:
        v = getattr(inst, a.name)
        # The filter sees the raw value; the serializer runs afterwards.
        if filter is not None and not filter(a, v):
            continue

        if value_serializer is not None:
            v = value_serializer(inst, a, v)

        # Recurse into nested attrs instances and common containers;
        # everything else is stored as-is.
        if recurse is True:
            if has(v.__class__):
                rv[a.name] = asdict(
                    v,
                    recurse=True,
                    filter=filter,
                    dict_factory=dict_factory,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                )
            elif isinstance(v, (tuple, list, set, frozenset)):
                cf = v.__class__ if retain_collection_types is True else list
                rv[a.name] = cf(
                    [
                        _asdict_anything(
                            i,
                            is_key=False,
                            filter=filter,
                            dict_factory=dict_factory,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        )
                        for i in v
                    ]
                )
            elif isinstance(v, dict):
                df = dict_factory
                # Keys are serialized with is_key=True so collections used
                # as keys stay hashable (they become tuples).
                rv[a.name] = df(
                    (
                        _asdict_anything(
                            kk,
                            is_key=True,
                            filter=filter,
                            dict_factory=df,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        ),
                        _asdict_anything(
                            vv,
                            is_key=False,
                            filter=filter,
                            dict_factory=df,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        ),
                    )
                    for kk, vv in v.items()
                )
            else:
                rv[a.name] = v
        else:
            rv[a.name] = v
    return rv
+
+
def _asdict_anything(
    val,
    is_key,
    filter,
    dict_factory,
    retain_collection_types,
    value_serializer,
):
    """
    ``asdict`` only works on attrs instances, this works on anything.

    *is_key* signals that *val* is used as a dict key, in which case
    collections are converted to tuples so the result stays hashable.
    """
    if getattr(val.__class__, "__attrs_attrs__", None) is not None:
        # Attrs class.
        rv = asdict(
            val,
            recurse=True,
            filter=filter,
            dict_factory=dict_factory,
            retain_collection_types=retain_collection_types,
            value_serializer=value_serializer,
        )
    elif isinstance(val, (tuple, list, set, frozenset)):
        if retain_collection_types is True:
            cf = val.__class__
        elif is_key:
            # Dict keys must be hashable; tuple is the hashable stand-in.
            cf = tuple
        else:
            cf = list

        rv = cf(
            [
                _asdict_anything(
                    i,
                    is_key=False,
                    filter=filter,
                    dict_factory=dict_factory,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                )
                for i in val
            ]
        )
    elif isinstance(val, dict):
        df = dict_factory
        rv = df(
            (
                _asdict_anything(
                    kk,
                    is_key=True,
                    filter=filter,
                    dict_factory=df,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                ),
                _asdict_anything(
                    vv,
                    is_key=False,
                    filter=filter,
                    dict_factory=df,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                ),
            )
            for kk, vv in val.items()
        )
    else:
        # Scalar leaf: only the serializer hook is applied here.
        rv = val
        if value_serializer is not None:
            rv = value_serializer(None, None, rv)

    return rv
+
+
def astuple(
    inst,
    recurse=True,
    filter=None,
    tuple_factory=tuple,
    retain_collection_types=False,
):
    """
    Return the ``attrs`` attribute values of *inst* as a tuple.

    Optionally recurse into other ``attrs``-decorated classes.

    :param inst: Instance of an ``attrs``-decorated class.
    :param bool recurse: Recurse into classes that are also
        ``attrs``-decorated.
    :param callable filter: A callable whose return code determines whether an
        attribute or element is included (``True``) or dropped (``False``). Is
        called with the `attrs.Attribute` as the first argument and the
        value as the second argument.
    :param callable tuple_factory: A callable to produce tuples from. For
        example, to produce lists instead of tuples.
    :param bool retain_collection_types: Do not convert to ``list``
        or ``dict`` when encountering an attribute which type is
        ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
        ``True``.

    :rtype: return type of *tuple_factory*

    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    .. versionadded:: 16.2.0
    """
    attrs = fields(inst.__class__)
    rv = []
    retain = retain_collection_types  # Very long. :/
    for a in attrs:
        v = getattr(inst, a.name)
        if filter is not None and not filter(a, v):
            continue
        # NOTE: unlike asdict, there is no value_serializer hook here.
        if recurse is True:
            if has(v.__class__):
                rv.append(
                    astuple(
                        v,
                        recurse=True,
                        filter=filter,
                        tuple_factory=tuple_factory,
                        retain_collection_types=retain,
                    )
                )
            elif isinstance(v, (tuple, list, set, frozenset)):
                cf = v.__class__ if retain is True else list
                rv.append(
                    cf(
                        [
                            astuple(
                                j,
                                recurse=True,
                                filter=filter,
                                tuple_factory=tuple_factory,
                                retain_collection_types=retain,
                            )
                            if has(j.__class__)
                            else j
                            for j in v
                        ]
                    )
                )
            elif isinstance(v, dict):
                # Both keys and values may themselves be attrs instances.
                df = v.__class__ if retain is True else dict
                rv.append(
                    df(
                        (
                            astuple(
                                kk,
                                tuple_factory=tuple_factory,
                                retain_collection_types=retain,
                            )
                            if has(kk.__class__)
                            else kk,
                            astuple(
                                vv,
                                tuple_factory=tuple_factory,
                                retain_collection_types=retain,
                            )
                            if has(vv.__class__)
                            else vv,
                        )
                        for kk, vv in v.items()
                    )
                )
            else:
                rv.append(v)
        else:
            rv.append(v)

    return rv if tuple_factory is list else tuple_factory(rv)
+
+
def has(cls):
    """
    Check whether *cls* is a class with ``attrs`` attributes.

    :param type cls: Class to introspect.
    :raise TypeError: If *cls* is not a class.

    :rtype: bool
    """
    # attrs-decorated classes always carry an __attrs_attrs__ attribute.
    attrs_marker = getattr(cls, "__attrs_attrs__", None)
    return attrs_marker is not None
+
+
def assoc(inst, **changes):
    """
    Copy *inst* and apply *changes*.

    :param inst: Instance of a class with ``attrs`` attributes.
    :param changes: Keyword changes in the new copy.

    :return: A copy of inst with *changes* incorporated.

    :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
        be found on *cls*.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    .. deprecated:: 17.1.0
        Use `attrs.evolve` instead if you can.
        This function will not be removed due to the slightly different
        approach compared to `attrs.evolve`.
    """
    import warnings

    warnings.warn(
        "assoc is deprecated and will be removed after 2018/01.",
        DeprecationWarning,
        stacklevel=2,
    )
    new = copy.copy(inst)
    attrs = fields(inst.__class__)
    for k, v in changes.items():
        a = getattr(attrs, k, NOTHING)
        if a is NOTHING:
            raise AttrsAttributeNotFoundError(
                "{k} is not an attrs attribute on {cl}.".format(
                    k=k, cl=new.__class__
                )
            )
        # object.__setattr__ sidesteps the class's own __setattr__ hooks.
        _obj_setattr(new, k, v)
    return new
+
+
def evolve(inst, **changes):
    """
    Create a new instance, based on *inst* with *changes* applied.

    :param inst: Instance of a class with ``attrs`` attributes.
    :param changes: Keyword changes in the new copy.

    :return: A copy of inst with *changes* incorporated.

    :raise TypeError: If *attr_name* couldn't be found in the class
        ``__init__``.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    .. versionadded:: 17.1.0
    """
    cls = inst.__class__
    for a in fields(cls):
        if not a.init:
            # Attributes excluded from __init__ can't be passed anyway.
            continue
        name = a.name
        # Private attributes are exposed in __init__ without their leading
        # underscore.
        init_name = name[1:] if name[0] == "_" else name
        # Keep explicitly passed values; fill in the rest from *inst*.
        changes.setdefault(init_name, getattr(inst, name))

    return cls(**changes)
+
+
def resolve_types(cls, globalns=None, localns=None, attribs=None):
    """
    Resolve any strings and forward annotations in type annotations.

    This is only required if you need concrete types in `Attribute`'s *type*
    field. In other words, you don't need to resolve your types if you only
    use them for static type checking.

    With no arguments, names will be looked up in the module in which the class
    was created. If this is not what you want, e.g. if the name only exists
    inside a method, you may pass *globalns* or *localns* to specify other
    dictionaries in which to look up these names. See the docs of
    `typing.get_type_hints` for more details.

    :param type cls: Class to resolve.
    :param Optional[dict] globalns: Dictionary containing global variables.
    :param Optional[dict] localns: Dictionary containing local variables.
    :param Optional[list] attribs: List of attribs for the given class.
        This is necessary when calling from inside a ``field_transformer``
        since *cls* is not an ``attrs`` class yet.

    :raise TypeError: If *cls* is not a class.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class and you didn't pass any attribs.
    :raise NameError: If types cannot be resolved because of missing variables.

    :returns: *cls* so you can use this function also as a class decorator.
        Please note that you have to apply it **after** `attrs.define`. That
        means the decorator has to come in the line **before** `attrs.define`.

    .. versionadded:: 20.1.0
    .. versionadded:: 21.1.0 *attribs*

    """
    # Since calling get_type_hints is expensive we cache whether we've
    # done it already.
    if getattr(cls, "__attrs_types_resolved__", None) != cls:
        # Lazy import: typing is only needed (and paid for) when actually
        # resolving.
        import typing

        hints = typing.get_type_hints(cls, globalns=globalns, localns=localns)
        for field in fields(cls) if attribs is None else attribs:
            if field.name in hints:
                # Since fields have been frozen we must work around it.
                _obj_setattr(field, "type", hints[field.name])
        # We store the class we resolved so that subclasses know they haven't
        # been resolved.
        cls.__attrs_types_resolved__ = cls

    # Return the class so you can use it as a decorator too.
    return cls
diff --git a/libs/attr/_make.py b/libs/attr/_make.py
new file mode 100644
index 000000000..4d1afe3fc
--- /dev/null
+++ b/libs/attr/_make.py
@@ -0,0 +1,3006 @@
+# SPDX-License-Identifier: MIT
+
+import copy
+import linecache
+import sys
+import types
+import typing
+
+from operator import itemgetter
+
+# We need to import _compat itself in addition to the _compat members to avoid
+# having the thread-local in the globals here.
+from . import _compat, _config, setters
+from ._compat import (
+ HAS_F_STRINGS,
+ PY310,
+ PYPY,
+ _AnnotationExtractor,
+ ordered_dict,
+ set_closure_cell,
+)
+from .exceptions import (
+ DefaultAlreadySetError,
+ FrozenInstanceError,
+ NotAnAttrsClassError,
+ UnannotatedAttributeError,
+)
+
+
+# This is used at least twice, so cache it here.
+_obj_setattr = object.__setattr__
+_init_converter_pat = "__attr_converter_%s"
+_init_factory_pat = "__attr_factory_{}"
+_tuple_property_pat = (
+ " {attr_name} = _attrs_property(_attrs_itemgetter({index}))"
+)
+_classvar_prefixes = (
+ "typing.ClassVar",
+ "t.ClassVar",
+ "ClassVar",
+ "typing_extensions.ClassVar",
+)
+# we don't use a double-underscore prefix because that triggers
+# name mangling when trying to create a slot for the field
+# (when slots=True)
+_hash_cache_field = "_attrs_cached_hash"
+
+_empty_metadata_singleton = types.MappingProxyType({})
+
+# Unique object for unequivocal getattr() defaults.
+_sentinel = object()
+
+_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate)
+
+
+class _Nothing:
+ """
+ Sentinel class to indicate the lack of a value when ``None`` is ambiguous.
+
+ ``_Nothing`` is a singleton. There is only ever one of it.
+
+ .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False.
+ """
+
+ _singleton = None
+
+ def __new__(cls):
+ if _Nothing._singleton is None:
+ _Nothing._singleton = super().__new__(cls)
+ return _Nothing._singleton
+
+ def __repr__(self):
+ return "NOTHING"
+
+ def __bool__(self):
+ return False
+
+
+NOTHING = _Nothing()
+"""
+Sentinel to indicate the lack of a value when ``None`` is ambiguous.
+"""
+
+
+class _CacheHashWrapper(int):
+ """
+ An integer subclass that pickles / copies as None
+
+ This is used for non-slots classes with ``cache_hash=True``, to avoid
+ serializing a potentially (even likely) invalid hash value. Since ``None``
+ is the default value for uncalculated hashes, whenever this is copied,
+ the copy's value for the hash should automatically reset.
+
+ See GH #613 for more details.
+ """
+
+ def __reduce__(self, _none_constructor=type(None), _args=()):
+ return _none_constructor, _args
+
+
def attrib(
    default=NOTHING,
    validator=None,
    repr=True,
    cmp=None,
    hash=None,
    init=True,
    metadata=None,
    type=None,
    converter=None,
    factory=None,
    kw_only=False,
    eq=None,
    order=None,
    on_setattr=None,
):
    """
    Create a new attribute on a class.

    .. warning::

        Does *not* do anything unless the class is also decorated with
        `attr.s`!

    :param default: A value that is used if an ``attrs``-generated ``__init__``
        is used and no value is passed while instantiating or the attribute is
        excluded using ``init=False``.

        If the value is an instance of `attrs.Factory`, its callable will be
        used to construct a new value (useful for mutable data types like lists
        or dicts).

        If a default is not set (or set manually to `attrs.NOTHING`), a value
        *must* be supplied when instantiating; otherwise a `TypeError`
        will be raised.

        The default can also be set using decorator notation as shown below.

    :type default: Any value

    :param callable factory: Syntactic sugar for
        ``default=attr.Factory(factory)``.

    :param validator: `callable` that is called by ``attrs``-generated
        ``__init__`` methods after the instance has been initialized. They
        receive the initialized instance, the :func:`~attrs.Attribute`, and the
        passed value.

        The return value is *not* inspected so the validator has to throw an
        exception itself.

        If a `list` is passed, its items are treated as validators and must
        all pass.

        Validators can be globally disabled and re-enabled using
        `get_run_validators`.

        The validator can also be set using decorator notation as shown below.

    :type validator: `callable` or a `list` of `callable`\\ s.

    :param repr: Include this attribute in the generated ``__repr__``
        method. If ``True``, include the attribute; if ``False``, omit it. By
        default, the built-in ``repr()`` function is used. To override how the
        attribute value is formatted, pass a ``callable`` that takes a single
        value and returns a string. Note that the resulting string is used
        as-is, i.e. it will be used directly *instead* of calling ``repr()``
        (the default).
    :type repr: a `bool` or a `callable` to use a custom function.

    :param eq: If ``True`` (default), include this attribute in the
        generated ``__eq__`` and ``__ne__`` methods that check two instances
        for equality. To override how the attribute value is compared,
        pass a ``callable`` that takes a single value and returns the value
        to be compared.
    :type eq: a `bool` or a `callable`.

    :param order: If ``True`` (default), include this attributes in the
        generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods.
        To override how the attribute value is ordered,
        pass a ``callable`` that takes a single value and returns the value
        to be ordered.
    :type order: a `bool` or a `callable`.

    :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the
        same value. Must not be mixed with *eq* or *order*.
    :type cmp: a `bool` or a `callable`.

    :param Optional[bool] hash: Include this attribute in the generated
        ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This
        is the correct behavior according the Python spec. Setting this value
        to anything else than ``None`` is *discouraged*.
    :param bool init: Include this attribute in the generated ``__init__``
        method. It is possible to set this to ``False`` and set a default
        value. In that case this attributed is unconditionally initialized
        with the specified default value or factory.
    :param callable converter: `callable` that is called by
        ``attrs``-generated ``__init__`` methods to convert attribute's value
        to the desired format. It is given the passed-in value, and the
        returned value will be used as the new value of the attribute. The
        value is converted before being passed to the validator, if any.
    :param metadata: An arbitrary mapping, to be used by third-party
        components. See `extending_metadata`.
    :param type: The type of the attribute. In Python 3.6 or greater, the
        preferred method to specify the type is using a variable annotation
        (see :pep:`526`).
        This argument is provided for backward compatibility.
        Regardless of the approach used, the type will be stored on
        ``Attribute.type``.

        Please note that ``attrs`` doesn't do anything with this metadata by
        itself. You can use it as part of your own code or for
        `static type checking <types>`.
    :param kw_only: Make this attribute keyword-only (Python 3+)
        in the generated ``__init__`` (if ``init`` is ``False``, this
        parameter is ignored).
    :param on_setattr: Allows to overwrite the *on_setattr* setting from
        `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used.
        Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this
        attribute -- regardless of the setting in `attr.s`.
    :type on_setattr: `callable`, or a list of callables, or `None`, or
        `attrs.setters.NO_OP`

    .. versionadded:: 15.2.0 *convert*
    .. versionadded:: 16.3.0 *metadata*
    .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
    .. versionchanged:: 17.1.0
        *hash* is ``None`` and therefore mirrors *eq* by default.
    .. versionadded:: 17.3.0 *type*
    .. deprecated:: 17.4.0 *convert*
    .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated
        *convert* to achieve consistency with other noun-based arguments.
    .. versionadded:: 18.1.0
        ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
    .. versionadded:: 18.2.0 *kw_only*
    .. versionchanged:: 19.2.0 *convert* keyword argument removed.
    .. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
    .. versionadded:: 19.2.0 *eq* and *order*
    .. versionadded:: 20.1.0 *on_setattr*
    .. versionchanged:: 20.3.0 *kw_only* backported to Python 2
    .. versionchanged:: 21.1.0
        *eq*, *order*, and *cmp* also accept a custom callable
    .. versionchanged:: 21.1.0 *cmp* undeprecated
    """
    # Fold the eq/order/cmp tangle into concrete flags + key callables.
    eq, eq_key, order, order_key = _determine_attrib_eq_order(
        cmp, eq, order, True
    )

    if hash is not None and hash is not True and hash is not False:
        raise TypeError(
            "Invalid value for hash.  Must be True, False, or None."
        )

    # `factory` is sugar for `default=Factory(factory)`; the two are
    # mutually exclusive.
    if factory is not None:
        if default is not NOTHING:
            raise ValueError(
                "The `default` and `factory` arguments are mutually "
                "exclusive."
            )
        if not callable(factory):
            raise ValueError("The `factory` argument must be a callable.")
        default = Factory(factory)

    if metadata is None:
        metadata = {}

    # Apply syntactic sugar by auto-wrapping.
    if isinstance(on_setattr, (list, tuple)):
        on_setattr = setters.pipe(*on_setattr)

    if validator and isinstance(validator, (list, tuple)):
        validator = and_(*validator)

    if converter and isinstance(converter, (list, tuple)):
        converter = pipe(*converter)

    # cmp was already resolved into eq/order above, so it is passed as None.
    return _CountingAttr(
        default=default,
        validator=validator,
        repr=repr,
        cmp=None,
        hash=hash,
        init=init,
        converter=converter,
        metadata=metadata,
        type=type,
        kw_only=kw_only,
        eq=eq,
        eq_key=eq_key,
        order=order,
        order_key=order_key,
        on_setattr=on_setattr,
    )
+
+
+def _compile_and_eval(script, globs, locs=None, filename=""):
+ """
+ "Exec" the script with the given global (globs) and local (locs) variables.
+ """
+ bytecode = compile(script, filename, "exec")
+ eval(bytecode, globs, locs)
+
+
def _make_method(name, script, filename, globs):
    """
    Compile *script*, register a linecache entry so debuggers can step
    through the generated source, and return the object bound to *name*.
    """
    locs = {}

    # Add a fake linecache entry so PDB and friends can show the source.
    # If *filename* is already taken by a different script, probe
    # "<filename-1>", "<filename-2>", ... until a free slot is found.
    count = 1
    base_filename = filename
    while True:
        linecache_tuple = (
            len(script),
            None,
            script.splitlines(True),
            filename,
        )
        old_val = linecache.cache.setdefault(filename, linecache_tuple)
        if old_val == linecache_tuple:
            break
        filename = "{}-{}>".format(base_filename[:-1], count)
        count += 1

    _compile_and_eval(script, globs, locs, filename)

    return locs[name]
+
+
def _make_attr_tuple_class(cls_name, attr_names):
    """
    Create a tuple subclass to hold `Attribute`s for an `attrs` class.

    The subclass is a bare tuple with properties for names.

    class MyClassAttributes(tuple):
        __slots__ = ()
        x = property(itemgetter(0))
    """
    attr_class_name = "{}Attributes".format(cls_name)
    attr_class_template = [
        "class {}(tuple):".format(attr_class_name),
        "    __slots__ = ()",
    ]
    if attr_names:
        # One property per attribute, indexing into the tuple positionally.
        for i, attr_name in enumerate(attr_names):
            attr_class_template.append(
                _tuple_property_pat.format(index=i, attr_name=attr_name)
            )
    else:
        # A class body can't be empty -- emit a placeholder statement.
        attr_class_template.append("    pass")
    globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property}
    _compile_and_eval("\n".join(attr_class_template), globs)
    return globs[attr_class_name]
+
+
+# Tuple class for extracted attributes from a class definition.
+# `base_attrs` is a subset of `attrs`.
+_Attributes = _make_attr_tuple_class(
+ "_Attributes",
+ [
+ # all attributes to build dunder methods for
+ "attrs",
+ # attributes that have been inherited
+ "base_attrs",
+ # map inherited attributes to their originating classes
+ "base_attrs_map",
+ ],
+)
+
+
def _is_class_var(annot):
    """
    Check whether *annot* is a typing.ClassVar.

    The string comparison hack is used to avoid evaluating all string
    annotations which would put attrs-based classes at a performance
    disadvantage compared to plain old classes.
    """
    text = str(annot)

    # Strip one layer of quoting -- the annotation itself may be a string.
    if text.startswith(("'", '"')) and text.endswith(("'", '"')):
        text = text[1:-1]

    return text.startswith(_classvar_prefixes)
+
+
def _has_own_attribute(cls, attrib_name):
    """
    Check whether *cls* defines *attrib_name* (and doesn't just inherit it).

    Requires Python 3.
    """
    attr = getattr(cls, attrib_name, _sentinel)
    if attr is _sentinel:
        return False

    # If any base class resolves to the very same object, the attribute is
    # inherited rather than defined on *cls* itself.
    return all(
        getattr(base_cls, attrib_name, None) is not attr
        for base_cls in cls.__mro__[1:]
    )
+
+
def _get_annotations(cls):
    """
    Get annotations for *cls*, ignoring ones merely inherited from a base.
    """
    if not _has_own_attribute(cls, "__annotations__"):
        return {}

    return cls.__annotations__
+
+
+def _counter_getter(e):
+ """
+ Key function for sorting to avoid re-creating a lambda for every class.
+ """
+ return e[1].counter
+
+
def _collect_base_attrs(cls, taken_attr_names):
    """
    Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.

    Returns a ``(attrs, base_attr_map)`` pair: the inherited `Attribute`s in
    MRO order and a dict mapping each name to its defining base class.
    """
    base_attrs = []
    base_attr_map = {}  # A dictionary of base attrs to their classes.

    # Traverse the MRO and collect attributes.  reversed() walks from the
    # most distant ancestor towards the direct bases, so closer definitions
    # overwrite earlier entries in base_attr_map.
    for base_cls in reversed(cls.__mro__[1:-1]):
        for a in getattr(base_cls, "__attrs_attrs__", []):
            if a.inherited or a.name in taken_attr_names:
                continue

            a = a.evolve(inherited=True)
            base_attrs.append(a)
            base_attr_map[a.name] = base_cls

    # For each name, only keep the freshest definition i.e. the furthest at the
    # back. base_attr_map is fine because it gets overwritten with every new
    # instance.
    filtered = []
    seen = set()
    for a in reversed(base_attrs):
        if a.name in seen:
            continue
        filtered.insert(0, a)
        seen.add(a.name)

    return filtered, base_attr_map
+
+
def _collect_base_attrs_broken(cls, taken_attr_names):
    """
    Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.

    N.B. *taken_attr_names* will be mutated.

    Adhere to the old incorrect behavior.

    Notably it collects from the front and considers inherited attributes which
    leads to the buggy behavior reported in #428.
    """
    base_attrs = []
    base_attr_map = {}  # A dictionary of base attrs to their classes.

    # Traverse the MRO and collect attributes.  Unlike _collect_base_attrs,
    # this walks front-to-back and keeps already-inherited attributes.
    for base_cls in cls.__mro__[1:-1]:
        for a in getattr(base_cls, "__attrs_attrs__", []):
            if a.name in taken_attr_names:
                continue

            a = a.evolve(inherited=True)
            taken_attr_names.add(a.name)
            base_attrs.append(a)
            base_attr_map[a.name] = base_cls

    return base_attrs, base_attr_map
+
+
def _transform_attrs(
    cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer
):
    """
    Transform all `_CountingAttr`s on a class into `Attribute`s.

    If *these* is passed, use that and don't look for them on the class.

    If *collect_by_mro* is True, collect them in the correct MRO order,
    otherwise use the old -- incorrect -- order.  See #428.

    Return an `_Attributes`.
    """
    cd = cls.__dict__
    # Class-level type annotations; exact semantics live in _get_annotations.
    anns = _get_annotations(cls)

    if these is not None:
        # Attributes were passed in explicitly -- take them as-is and never
        # inspect the class body.
        ca_list = [(name, ca) for name, ca in these.items()]

        if not isinstance(these, ordered_dict):
            # Unordered mapping: recover definition order from each
            # _CountingAttr's creation counter.
            ca_list.sort(key=_counter_getter)
    elif auto_attribs is True:
        # PEP 526 mode: annotated fields become attributes; a bare attr.ib()
        # without an annotation is an error (checked below).
        ca_names = {
            name
            for name, attr in cd.items()
            if isinstance(attr, _CountingAttr)
        }
        ca_list = []
        annot_names = set()
        # NB: ``type`` shadows the builtin inside this loop body.
        for attr_name, type in anns.items():
            if _is_class_var(type):
                # typing.ClassVar annotations are explicitly ignored.
                continue
            annot_names.add(attr_name)
            a = cd.get(attr_name, NOTHING)

            if not isinstance(a, _CountingAttr):
                # A plain assigned value becomes the default; no value at all
                # means a mandatory attribute.
                if a is NOTHING:
                    a = attrib()
                else:
                    a = attrib(default=a)
            ca_list.append((attr_name, a))

        unannotated = ca_names - annot_names
        if len(unannotated) > 0:
            raise UnannotatedAttributeError(
                "The following `attr.ib`s lack a type annotation: "
                + ", ".join(
                    sorted(unannotated, key=lambda n: cd.get(n).counter)
                )
                + "."
            )
    else:
        # Classic mode: collect attr.ib()s from the class body in definition
        # order (via their counters).
        ca_list = sorted(
            (
                (name, attr)
                for name, attr in cd.items()
                if isinstance(attr, _CountingAttr)
            ),
            key=lambda e: e[1].counter,
        )

    own_attrs = [
        Attribute.from_counting_attr(
            name=attr_name, ca=ca, type=anns.get(attr_name)
        )
        for attr_name, ca in ca_list
    ]

    # Names defined on this class take precedence over inherited ones.
    if collect_by_mro:
        base_attrs, base_attr_map = _collect_base_attrs(
            cls, {a.name for a in own_attrs}
        )
    else:
        base_attrs, base_attr_map = _collect_base_attrs_broken(
            cls, {a.name for a in own_attrs}
        )

    if kw_only:
        own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
        base_attrs = [a.evolve(kw_only=True) for a in base_attrs]

    attrs = base_attrs + own_attrs

    # Mandatory vs non-mandatory attr order only matters when they are part of
    # the __init__ signature and when they aren't kw_only (which are moved to
    # the end and can be mandatory or non-mandatory in any order, as they will
    # be specified as keyword args anyway). Check the order of those attrs:
    had_default = False
    for a in (a for a in attrs if a.init is not False and a.kw_only is False):
        if had_default is True and a.default is NOTHING:
            raise ValueError(
                "No mandatory attributes allowed after an attribute with a "
                "default value or factory. Attribute in question: %r" % (a,)
            )

        if had_default is False and a.default is not NOTHING:
            had_default = True

    if field_transformer is not None:
        attrs = field_transformer(cls, attrs)

    # Create AttrsClass *after* applying the field_transformer since it may
    # add or remove attributes!
    attr_names = [a.name for a in attrs]
    AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)

    return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map))
+
+
if PYPY:

    def _frozen_setattrs(self, name, value):
        """
        Attached to frozen classes as __setattr__.
        """
        # On PyPy, __cause__/__context__ on exceptions are written through
        # normal attribute assignment -- presumably during exception chaining
        # -- so those two names must remain settable on frozen exception
        # classes (NOTE(review): confirm against PyPy's exception machinery).
        if isinstance(self, BaseException) and name in (
            "__cause__",
            "__context__",
        ):
            BaseException.__setattr__(self, name, value)
            return

        raise FrozenInstanceError()

else:

    def _frozen_setattrs(self, name, value):
        """
        Attached to frozen classes as __setattr__.
        """
        raise FrozenInstanceError()
+
+
def _frozen_delattrs(self, name):
    """
    Attached to frozen classes as __delattr__.
    """
    # Deleting an attribute is mutation, too.
    raise FrozenInstanceError()
+
+
class _ClassBuilder:
    """
    Iteratively build *one* class.

    Generated dunder methods are accumulated in ``_cls_dict`` and finally
    either patched onto the original class or -- for slotted classes -- used
    to create a fresh replacement class (see `build_class`).
    """

    __slots__ = (
        "_attr_names",
        "_attrs",
        "_base_attr_map",
        "_base_names",
        "_cache_hash",
        "_cls",
        "_cls_dict",
        "_delete_attribs",
        "_frozen",
        "_has_pre_init",
        "_has_post_init",
        "_is_exc",
        "_on_setattr",
        "_slots",
        "_weakref_slot",
        "_wrote_own_setattr",
        "_has_custom_setattr",
    )

    def __init__(
        self,
        cls,
        these,
        slots,
        frozen,
        weakref_slot,
        getstate_setstate,
        auto_attribs,
        kw_only,
        cache_hash,
        is_exc,
        collect_by_mro,
        on_setattr,
        has_custom_setattr,
        field_transformer,
    ):
        attrs, base_attrs, base_map = _transform_attrs(
            cls,
            these,
            auto_attribs,
            kw_only,
            collect_by_mro,
            field_transformer,
        )

        self._cls = cls
        # Slotted classes are rebuilt from scratch, so start from a copy of
        # the original class dict; dict classes are patched in place.
        self._cls_dict = dict(cls.__dict__) if slots else {}
        self._attrs = attrs
        self._base_names = {a.name for a in base_attrs}
        self._base_attr_map = base_map
        self._attr_names = tuple(a.name for a in attrs)
        self._slots = slots
        self._frozen = frozen
        self._weakref_slot = weakref_slot
        self._cache_hash = cache_hash
        self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
        self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
        # If *these* was passed, the class body is left untouched.
        self._delete_attribs = not bool(these)
        self._is_exc = is_exc
        self._on_setattr = on_setattr

        self._has_custom_setattr = has_custom_setattr
        self._wrote_own_setattr = False

        self._cls_dict["__attrs_attrs__"] = self._attrs

        if frozen:
            self._cls_dict["__setattr__"] = _frozen_setattrs
            self._cls_dict["__delattr__"] = _frozen_delattrs

            self._wrote_own_setattr = True
        elif on_setattr in (
            _ng_default_on_setattr,
            setters.validate,
            setters.convert,
        ):
            has_validator = has_converter = False
            for a in attrs:
                if a.validator is not None:
                    has_validator = True
                if a.converter is not None:
                    has_converter = True

                if has_validator and has_converter:
                    break
            if (
                (
                    on_setattr == _ng_default_on_setattr
                    and not (has_validator or has_converter)
                )
                or (on_setattr == setters.validate and not has_validator)
                or (on_setattr == setters.convert and not has_converter)
            ):
                # If class-level on_setattr is set to convert + validate, but
                # there's no field to convert or validate, pretend like there's
                # no on_setattr.
                self._on_setattr = None

        if getstate_setstate:
            (
                self._cls_dict["__getstate__"],
                self._cls_dict["__setstate__"],
            ) = self._make_getstate_setstate()

    def __repr__(self):
        return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__)

    def build_class(self):
        """
        Finalize class based on the accumulated configuration.

        Builder cannot be used after calling this method.
        """
        if self._slots is True:
            return self._create_slots_class()
        else:
            return self._patch_original_class()

    def _patch_original_class(self):
        """
        Apply accumulated methods and return the class.
        """
        cls = self._cls
        base_names = self._base_names

        # Clean class of attribute definitions (`attr.ib()`s).
        if self._delete_attribs:
            for name in self._attr_names:
                if (
                    name not in base_names
                    and getattr(cls, name, _sentinel) is not _sentinel
                ):
                    try:
                        delattr(cls, name)
                    except AttributeError:
                        # This can happen if a base class defines a class
                        # variable and we want to set an attribute with the
                        # same name by using only a type annotation.
                        pass

        # Attach our dunder methods.
        for name, value in self._cls_dict.items():
            setattr(cls, name, value)

        # If we've inherited an attrs __setattr__ and don't write our own,
        # reset it to object's.
        if not self._wrote_own_setattr and getattr(
            cls, "__attrs_own_setattr__", False
        ):
            cls.__attrs_own_setattr__ = False

            if not self._has_custom_setattr:
                cls.__setattr__ = _obj_setattr

        return cls

    def _create_slots_class(self):
        """
        Build and return a new class with a `__slots__` attribute.
        """
        # Start from the collected dict, minus the attribute definitions
        # themselves and the members a slotted class must not carry.
        cd = {
            k: v
            for k, v in self._cls_dict.items()
            if k not in tuple(self._attr_names) + ("__dict__", "__weakref__")
        }

        # If our class doesn't have its own implementation of __setattr__
        # (either from the user or by us), check the bases, if one of them has
        # an attrs-made __setattr__, that needs to be reset. We don't walk the
        # MRO because we only care about our immediate base classes.
        # XXX: This can be confused by subclassing a slotted attrs class with
        # XXX: a non-attrs class and subclass the resulting class with an attrs
        # XXX: class.  See `test_slotted_confused` for details.  For now that's
        # XXX: OK with us.
        if not self._wrote_own_setattr:
            cd["__attrs_own_setattr__"] = False

            if not self._has_custom_setattr:
                for base_cls in self._cls.__bases__:
                    if base_cls.__dict__.get("__attrs_own_setattr__", False):
                        cd["__setattr__"] = _obj_setattr
                        break

        # Traverse the MRO to collect existing slots
        # and check for an existing __weakref__.
        existing_slots = dict()
        weakref_inherited = False
        for base_cls in self._cls.__mro__[1:-1]:
            if base_cls.__dict__.get("__weakref__", None) is not None:
                weakref_inherited = True
            existing_slots.update(
                {
                    name: getattr(base_cls, name)
                    for name in getattr(base_cls, "__slots__", [])
                }
            )

        base_names = set(self._base_names)

        names = self._attr_names
        if (
            self._weakref_slot
            and "__weakref__" not in getattr(self._cls, "__slots__", ())
            and "__weakref__" not in names
            and not weakref_inherited
        ):
            names += ("__weakref__",)

        # We only add the names of attributes that aren't inherited.
        # Setting __slots__ to inherited attributes wastes memory.
        slot_names = [name for name in names if name not in base_names]
        # There are slots for attributes from current class
        # that are defined in parent classes.
        # As their descriptors may be overridden by a child class,
        # we collect them here and update the class dict
        reused_slots = {
            slot: slot_descriptor
            for slot, slot_descriptor in existing_slots.items()
            if slot in slot_names
        }
        slot_names = [name for name in slot_names if name not in reused_slots]
        cd.update(reused_slots)
        if self._cache_hash:
            slot_names.append(_hash_cache_field)
        cd["__slots__"] = tuple(slot_names)

        cd["__qualname__"] = self._cls.__qualname__

        # Create new class based on old class and our methods.
        cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)

        # The following is a fix for
        # <https://github.com/python-attrs/attrs/issues/102>.  On Python 3,
        # if a method mentions `__class__` or uses the no-arg super(), the
        # compiler will bake a reference to the class in the method itself
        # as `method.__closure__`.  Since we replace the class with a
        # clone, we rewrite these references so it keeps working.
        for item in cls.__dict__.values():
            if isinstance(item, (classmethod, staticmethod)):
                # Class- and staticmethods hide their functions inside.
                # These might need to be rewritten as well.
                closure_cells = getattr(item.__func__, "__closure__", None)
            elif isinstance(item, property):
                # Workaround for property `super()` shortcut (PY3-only).
                # There is no universal way for other descriptors.
                closure_cells = getattr(item.fget, "__closure__", None)
            else:
                closure_cells = getattr(item, "__closure__", None)

            if not closure_cells:  # Catch None or the empty list.
                continue
            for cell in closure_cells:
                try:
                    match = cell.cell_contents is self._cls
                except ValueError:  # ValueError: Cell is empty
                    pass
                else:
                    if match:
                        set_closure_cell(cell, cls)

        return cls

    def add_repr(self, ns):
        """
        Attach a generated ``__repr__``; *ns* is forwarded to `_make_repr`.
        """
        self._cls_dict["__repr__"] = self._add_method_dunders(
            _make_repr(self._attrs, ns, self._cls)
        )
        return self

    def add_str(self):
        """
        Attach a ``__str__`` that delegates to ``__repr__``.

        Requires `add_repr` to have been called first.
        """
        repr = self._cls_dict.get("__repr__")
        if repr is None:
            raise ValueError(
                "__str__ can only be generated if a __repr__ exists."
            )

        def __str__(self):
            return self.__repr__()

        self._cls_dict["__str__"] = self._add_method_dunders(__str__)
        return self

    def _make_getstate_setstate(self):
        """
        Create custom __setstate__ and __getstate__ methods.
        """
        # __weakref__ is not writable.
        state_attr_names = tuple(
            an for an in self._attr_names if an != "__weakref__"
        )

        def slots_getstate(self):
            """
            Automatically created by attrs.
            """
            return tuple(getattr(self, name) for name in state_attr_names)

        hash_caching_enabled = self._cache_hash

        def slots_setstate(self, state):
            """
            Automatically created by attrs.
            """
            __bound_setattr = _obj_setattr.__get__(self, Attribute)
            for name, value in zip(state_attr_names, state):
                __bound_setattr(name, value)

            # The hash code cache is not included when the object is
            # serialized, but it still needs to be initialized to None to
            # indicate that the first call to __hash__ should be a cache
            # miss.
            if hash_caching_enabled:
                __bound_setattr(_hash_cache_field, None)

        return slots_getstate, slots_setstate

    def make_unhashable(self):
        """
        Set ``__hash__`` to None, marking the class unhashable.
        """
        self._cls_dict["__hash__"] = None
        return self

    def add_hash(self):
        """
        Attach a generated ``__hash__``.
        """
        self._cls_dict["__hash__"] = self._add_method_dunders(
            _make_hash(
                self._cls,
                self._attrs,
                frozen=self._frozen,
                cache_hash=self._cache_hash,
            )
        )

        return self

    def add_init(self):
        """
        Attach a generated ``__init__``.
        """
        self._cls_dict["__init__"] = self._add_method_dunders(
            _make_init(
                self._cls,
                self._attrs,
                self._has_pre_init,
                self._has_post_init,
                self._frozen,
                self._slots,
                self._cache_hash,
                self._base_attr_map,
                self._is_exc,
                self._on_setattr,
                attrs_init=False,
            )
        )

        return self

    def add_match_args(self):
        """
        Attach a PEP 634 ``__match_args__`` tuple of the positional
        ``__init__`` parameters.
        """
        self._cls_dict["__match_args__"] = tuple(
            field.name
            for field in self._attrs
            if field.init and not field.kw_only
        )

    def add_attrs_init(self):
        """
        Attach a generated ``__attrs_init__`` (used when ``init=False``).
        """
        self._cls_dict["__attrs_init__"] = self._add_method_dunders(
            _make_init(
                self._cls,
                self._attrs,
                self._has_pre_init,
                self._has_post_init,
                self._frozen,
                self._slots,
                self._cache_hash,
                self._base_attr_map,
                self._is_exc,
                self._on_setattr,
                attrs_init=True,
            )
        )

        return self

    def add_eq(self):
        """
        Attach generated ``__eq__`` and ``__ne__``.
        """
        cd = self._cls_dict

        cd["__eq__"] = self._add_method_dunders(
            _make_eq(self._cls, self._attrs)
        )
        cd["__ne__"] = self._add_method_dunders(_make_ne())

        return self

    def add_order(self):
        """
        Attach generated ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__``.
        """
        cd = self._cls_dict

        cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = (
            self._add_method_dunders(meth)
            for meth in _make_order(self._cls, self._attrs)
        )

        return self

    def add_setattr(self):
        """
        Attach a ``__setattr__`` that runs per-attribute on_setattr hooks.

        No-op for frozen classes and when no attribute has an effective hook.
        """
        if self._frozen:
            return self

        sa_attrs = {}
        for a in self._attrs:
            on_setattr = a.on_setattr or self._on_setattr
            if on_setattr and on_setattr is not setters.NO_OP:
                sa_attrs[a.name] = a, on_setattr

        if not sa_attrs:
            return self

        if self._has_custom_setattr:
            # We need to write a __setattr__ but there already is one!
            raise ValueError(
                "Can't combine custom __setattr__ with on_setattr hooks."
            )

        # docstring comes from _add_method_dunders
        def __setattr__(self, name, val):
            try:
                a, hook = sa_attrs[name]
            except KeyError:
                nval = val
            else:
                nval = hook(self, a, val)

            _obj_setattr(self, name, nval)

        self._cls_dict["__attrs_own_setattr__"] = True
        self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__)
        self._wrote_own_setattr = True

        return self

    def _add_method_dunders(self, method):
        """
        Add __module__ and __qualname__ to a *method* if possible.
        """
        try:
            method.__module__ = self._cls.__module__
        except AttributeError:
            pass

        try:
            method.__qualname__ = ".".join(
                (self._cls.__qualname__, method.__name__)
            )
        except AttributeError:
            pass

        try:
            method.__doc__ = "Method generated by attrs for class %s." % (
                self._cls.__qualname__,
            )
        except AttributeError:
            pass

        return method
+
+
def _determine_attrs_eq_order(cmp, eq, order, default_eq):
    """
    Validate the combination of *cmp*, *eq*, and *order*.  Derive the
    effective values of eq and order.  If *eq* is None, set it to
    *default_eq*.

    :param Optional[bool] cmp: Legacy flag that sets both *eq* and *order* at
        once; must not be combined with either of them.
    :param Optional[bool] eq: Whether equality methods were requested.
    :param Optional[bool] order: Whether ordering methods were requested.
    :param bool default_eq: Value used for *eq* when it's left None.

    :raises ValueError: If *cmp* is mixed with *eq*/*order*, or if ordering
        is requested without equality.

    :return: Tuple of the effective ``(eq, order)`` booleans.
    """
    if cmp is not None and any((eq is not None, order is not None)):
        # Fixed quoting: the message used to read "`eq'" with a mismatched
        # backtick/apostrophe pair.
        raise ValueError("Don't mix `cmp` with `eq` and `order`.")

    # cmp takes precedence due to bw-compatibility.
    if cmp is not None:
        return cmp, cmp

    # If left None, equality is set to the specified default and ordering
    # mirrors equality.
    if eq is None:
        eq = default_eq

    if order is None:
        order = eq

    if eq is False and order is True:
        raise ValueError("`order` can only be True if `eq` is True too.")

    return eq, order
+
+
def _determine_attrib_eq_order(cmp, eq, order, default_eq):
    """
    Validate the combination of *cmp*, *eq*, and *order*.  Derive the
    effective values of eq and order.  If *eq* is None, set it to
    *default_eq*.

    Unlike the class-level variant, *eq* and *order* may also be callables
    which are then used as comparison key functions.

    :raises ValueError: If *cmp* is mixed with *eq*/*order*, or if ordering
        is requested without equality.

    :return: Tuple ``(eq, eq_key, order, order_key)`` where the ``*_key``
        entries are the key callables or None.
    """
    if cmp is not None and any((eq is not None, order is not None)):
        # Fixed quoting: the message used to read "`eq'" with a mismatched
        # backtick/apostrophe pair.
        raise ValueError("Don't mix `cmp` with `eq` and `order`.")

    def decide_callable_or_boolean(value):
        """
        Decide whether a key function is used.
        """
        if callable(value):
            value, key = True, value
        else:
            key = None
        return value, key

    # cmp takes precedence due to bw-compatibility.
    if cmp is not None:
        cmp, cmp_key = decide_callable_or_boolean(cmp)
        return cmp, cmp_key, cmp, cmp_key

    # If left None, equality is set to the specified default and ordering
    # mirrors equality.
    if eq is None:
        eq, eq_key = default_eq, None
    else:
        eq, eq_key = decide_callable_or_boolean(eq)

    if order is None:
        order, order_key = eq, eq_key
    else:
        order, order_key = decide_callable_or_boolean(order)

    if eq is False and order is True:
        raise ValueError("`order` can only be True if `eq` is True too.")

    return eq, eq_key, order, order_key
+
+
def _determine_whether_to_implement(
    cls, flag, auto_detect, dunders, default=True
):
    """
    Check whether we should implement a set of methods for *cls*.

    *flag* is the argument passed into @attr.s like 'init', *auto_detect* the
    same as passed into @attr.s and *dunders* is a tuple of attribute names
    whose presence signal that the user has implemented it themselves.

    Return *default* if no reason for either for or against is found.
    """
    # An explicit True/False always wins.  isinstance keeps the identity
    # semantics: only the two bool singletons match, so e.g. 1 falls through
    # exactly like before.
    if isinstance(flag, bool):
        return flag

    if flag is None and auto_detect is False:
        return default

    # Logically, flag is None and auto_detect is True here: implement the
    # methods unless the user wrote any of the dunders on the class itself.
    if any(_has_own_attribute(cls, dunder) for dunder in dunders):
        return False

    return default
+
+
def attrs(
    maybe_cls=None,
    these=None,
    repr_ns=None,
    repr=None,
    cmp=None,
    hash=None,
    init=None,
    slots=False,
    frozen=False,
    weakref_slot=True,
    str=False,
    auto_attribs=False,
    kw_only=False,
    cache_hash=False,
    auto_exc=False,
    eq=None,
    order=None,
    auto_detect=False,
    collect_by_mro=False,
    getstate_setstate=None,
    on_setattr=None,
    field_transformer=None,
    match_args=True,
):
    r"""
    A class decorator that adds `dunder
    <https://wiki.python.org/moin/DunderAlias>`_\ -methods according to the
    specified attributes using `attr.ib` or the *these* argument.

    :param these: A dictionary of name to `attr.ib` mappings.  This is
        useful to avoid the definition of your attributes within the class body
        because you can't (e.g. if you want to add ``__repr__`` methods to
        Django models) or don't want to.

        If *these* is not ``None``, ``attrs`` will *not* search the class body
        for attributes and will *not* remove any attributes from it.

        If *these* is an ordered dict (`dict` on Python 3.6+,
        `collections.OrderedDict` otherwise), the order is deduced from
        the order of the attributes inside *these*.  Otherwise the order
        of the definition of the attributes is used.

    :type these: `dict` of `str` to `attr.ib`

    :param str repr_ns: When using nested classes, there's no way in Python 2
        to automatically detect that.  Therefore it's possible to set the
        namespace explicitly for a more meaningful ``repr`` output.
    :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*,
        *order*, and *hash* arguments explicitly, assume they are set to
        ``True`` **unless any** of the involved methods for one of the
        arguments is implemented in the *current* class (i.e. it is *not*
        inherited from some base class).

        So for example by implementing ``__eq__`` on a class yourself,
        ``attrs`` will deduce ``eq=False`` and will create *neither*
        ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible
        ``__ne__`` by default, so it *should* be enough to only implement
        ``__eq__`` in most cases).

        .. warning::

           If you prevent ``attrs`` from creating the ordering methods for you
           (``order=False``, e.g. by implementing ``__le__``), it becomes
           *your* responsibility to make sure its ordering is sound. The best
           way is to use the `functools.total_ordering` decorator.


        Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*,
        *cmp*, or *hash* overrides whatever *auto_detect* would determine.

        *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises
        an `attrs.exceptions.PythonTooOldError`.

    :param bool repr: Create a ``__repr__`` method with a human readable
        representation of ``attrs`` attributes.
    :param bool str: Create a ``__str__`` method that is identical to
        ``__repr__``.  This is usually not necessary except for
        `Exception`\ s.
    :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__``
        and ``__ne__`` methods that check two instances for equality.

        They compare the instances as if they were tuples of their ``attrs``
        attributes if and only if the types of both classes are *identical*!
    :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``,
        ``__gt__``, and ``__ge__`` methods that behave like *eq* above and
        allow instances to be ordered.  If ``None`` (default) mirror value of
        *eq*.
    :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq*
        and *order* to the same value.  Must not be mixed with *eq* or *order*.
    :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method
        is generated according how *eq* and *frozen* are set.

        1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you.
        2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to
           None, marking it unhashable (which it is).
        3. If *eq* is False, ``__hash__`` will be left untouched meaning the
           ``__hash__`` method of the base class will be used (if base class is
           ``object``, this means it will fall back to id-based hashing.).

        Although not recommended, you can decide for yourself and force
        ``attrs`` to create one (e.g. if the class is immutable even though you
        didn't freeze it programmatically) by passing ``True`` or not.  Both of
        these cases are rather special and should be used carefully.

        See our documentation on `hashing`, Python's documentation on
        `object.__hash__`, and the `GitHub issue that led to the default \
        behavior <https://github.com/python-attrs/attrs/issues/136>`_ for more
        details.
    :param bool init: Create a ``__init__`` method that initializes the
        ``attrs`` attributes.  Leading underscores are stripped for the argument
        name.  If a ``__attrs_pre_init__`` method exists on the class, it will
        be called before the class is initialized.  If a ``__attrs_post_init__``
        method exists on the class, it will be called after the class is fully
        initialized.

        If ``init`` is ``False``, an ``__attrs_init__`` method will be
        injected instead.  This allows you to define a custom ``__init__``
        method that can do pre-init work such as ``super().__init__()``,
        and then call ``__attrs_init__()`` and ``__attrs_post_init__()``.
    :param bool slots: Create a `slotted class <slotted classes>` that's more
        memory-efficient.  Slotted classes are generally superior to the default
        dict classes, but have some gotchas you should know about, so we
        encourage you to read the `glossary entry <slotted classes>`.
    :param bool frozen: Make instances immutable after initialization.  If
        someone attempts to modify a frozen instance,
        `attr.exceptions.FrozenInstanceError` is raised.

        .. note::

            1. This is achieved by installing a custom ``__setattr__`` method
               on your class, so you can't implement your own.

            2. True immutability is impossible in Python.

            3. This *does* have a minor runtime performance `impact
               <how-frozen>` when initializing new instances.  In other words:
               ``__init__`` is slightly slower with ``frozen=True``.

            4. If a class is frozen, you cannot modify ``self`` in
               ``__attrs_post_init__`` or a self-written ``__init__``. You can
               circumvent that limitation by using
               ``object.__setattr__(self, "attribute_name", value)``.

            5. Subclasses of a frozen class are frozen too.

    :param bool weakref_slot: Make instances weak-referenceable.  This has no
        effect unless ``slots`` is also enabled.
    :param bool auto_attribs: If ``True``, collect :pep:`526`-annotated
        attributes (Python 3.6 and later only) from the class body.

        In this case, you **must** annotate every field.  If ``attrs``
        encounters a field that is set to an `attr.ib` but lacks a type
        annotation, an `attr.exceptions.UnannotatedAttributeError` is
        raised.  Use ``field_name: typing.Any = attr.ib(...)`` if you don't
        want to set a type.

        If you assign a value to those attributes (e.g. ``x: int = 42``), that
        value becomes the default value like if it were passed using
        ``attr.ib(default=42)``.  Passing an instance of `attrs.Factory` also
        works as expected in most cases (see warning below).

        Attributes annotated as `typing.ClassVar`, and attributes that are
        neither annotated nor set to an `attr.ib` are **ignored**.

        .. warning::
           For features that use the attribute name to create decorators (e.g.
           `validators <validators>`), you still *must* assign `attr.ib` to
           them.  Otherwise Python will either not find the name or try to use
           the default value to call e.g. ``validator`` on it.

           These errors can be quite confusing and probably the most common bug
           report on our bug tracker.

    :param bool kw_only: Make all attributes keyword-only (Python 3+)
        in the generated ``__init__`` (if ``init`` is ``False``, this
        parameter is ignored).
    :param bool cache_hash: Ensure that the object's hash code is computed
        only once and stored on the object.  If this is set to ``True``,
        hashing must be either explicitly or implicitly enabled for this
        class.  If the hash code is cached, avoid any reassignments of
        fields involved in hash code computation or mutations of the objects
        those fields point to after object creation.  If such changes occur,
        the behavior of the object's hash code is undefined.
    :param bool auto_exc: If the class subclasses `BaseException`
        (which implicitly includes any subclass of any exception), the
        following happens to behave like a well-behaved Python exceptions
        class:

        - the values for *eq*, *order*, and *hash* are ignored and the
          instances compare and hash by the instance's ids (N.B. ``attrs`` will
          *not* remove existing implementations of ``__hash__`` or the equality
          methods.  It just won't add own ones.),
        - all attributes that are either passed into ``__init__`` or have a
          default value are additionally available as a tuple in the ``args``
          attribute,
        - the value of *str* is ignored leaving ``__str__`` to base classes.
    :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs``
       collects attributes from base classes.  The default behavior is
       incorrect in certain cases of multiple inheritance.  It should be on by
       default but is kept off for backward-compatibility.

       See issue `#428 <https://github.com/python-attrs/attrs/issues/428>`_ for
       more details.

    :param Optional[bool] getstate_setstate:
       .. note::
          This is usually only interesting for slotted classes and you should
          probably just set *auto_detect* to `True`.

       If `True`, ``__getstate__`` and
       ``__setstate__`` are generated and attached to the class.  This is
       necessary for slotted classes to be pickleable.  If left `None`, it's
       `True` by default for slotted classes and ``False`` for dict classes.

       If *auto_detect* is `True`, and *getstate_setstate* is left `None`,
       and **either** ``__getstate__`` or ``__setstate__`` is detected directly
       on the class (i.e. not inherited), it is set to `False` (this is usually
       what you want).

    :param on_setattr: A callable that is run whenever the user attempts to set
        an attribute (either by assignment like ``i.x = 42`` or by using
        `setattr` like ``setattr(i, "x", 42)``).  It receives the same arguments
        as validators: the instance, the attribute that is being modified, and
        the new value.

        If no exception is raised, the attribute is set to the return value of
        the callable.

        If a list of callables is passed, they're automatically wrapped in an
        `attrs.setters.pipe`.
    :type on_setattr: `callable`, or a list of callables, or `None`, or
        `attrs.setters.NO_OP`

    :param Optional[callable] field_transformer:
        A function that is called with the original class object and all
        fields right before ``attrs`` finalizes the class.  You can use
        this, e.g., to automatically add converters or validators to
        fields based on their types.  See `transform-fields` for more details.

    :param bool match_args:
        If `True` (default), set ``__match_args__`` on the class to support
        :pep:`634` (Structural Pattern Matching). It is a tuple of all
        non-keyword-only ``__init__`` parameter names on Python 3.10 and later.
        Ignored on older Python versions.

    .. versionadded:: 16.0.0 *slots*
    .. versionadded:: 16.1.0 *frozen*
    .. versionadded:: 16.3.0 *str*
    .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
    .. versionchanged:: 17.1.0
       *hash* supports ``None`` as value which is also the default now.
    .. versionadded:: 17.3.0 *auto_attribs*
    .. versionchanged:: 18.1.0
       If *these* is passed, no attributes are deleted from the class body.
    .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
    .. versionadded:: 18.2.0 *weakref_slot*
    .. deprecated:: 18.2.0
       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
       `DeprecationWarning` if the classes compared are subclasses of
       each other.  ``__eq__`` and ``__ne__`` never tried to compare subclasses
       to each other.
    .. versionchanged:: 19.2.0
       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
       subclasses comparable anymore.
    .. versionadded:: 18.2.0 *kw_only*
    .. versionadded:: 18.2.0 *cache_hash*
    .. versionadded:: 19.1.0 *auto_exc*
    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
    .. versionadded:: 19.2.0 *eq* and *order*
    .. versionadded:: 20.1.0 *auto_detect*
    .. versionadded:: 20.1.0 *collect_by_mro*
    .. versionadded:: 20.1.0 *getstate_setstate*
    .. versionadded:: 20.1.0 *on_setattr*
    .. versionadded:: 20.3.0 *field_transformer*
    .. versionchanged:: 21.1.0
       ``init=False`` injects ``__attrs_init__``
    .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__``
    .. versionchanged:: 21.1.0 *cmp* undeprecated
    .. versionadded:: 21.3.0 *match_args*
    """
    eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None)
    hash_ = hash  # work around the lack of nonlocal

    if isinstance(on_setattr, (list, tuple)):
        on_setattr = setters.pipe(*on_setattr)

    def wrap(cls):
        # Subclasses of frozen classes are frozen, too.
        is_frozen = frozen or _has_frozen_base_class(cls)
        is_exc = auto_exc is True and issubclass(cls, BaseException)
        has_own_setattr = auto_detect and _has_own_attribute(
            cls, "__setattr__"
        )

        if has_own_setattr and is_frozen:
            raise ValueError("Can't freeze a class with a custom __setattr__.")

        builder = _ClassBuilder(
            cls,
            these,
            slots,
            is_frozen,
            weakref_slot,
            _determine_whether_to_implement(
                cls,
                getstate_setstate,
                auto_detect,
                ("__getstate__", "__setstate__"),
                default=slots,
            ),
            auto_attribs,
            kw_only,
            cache_hash,
            is_exc,
            collect_by_mro,
            on_setattr,
            has_own_setattr,
            field_transformer,
        )
        if _determine_whether_to_implement(
            cls, repr, auto_detect, ("__repr__",)
        ):
            builder.add_repr(repr_ns)
        if str is True:
            builder.add_str()

        eq = _determine_whether_to_implement(
            cls, eq_, auto_detect, ("__eq__", "__ne__")
        )
        # Exception classes compare (and hash, below) by identity.
        if not is_exc and eq is True:
            builder.add_eq()
        if not is_exc and _determine_whether_to_implement(
            cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__")
        ):
            builder.add_order()

        builder.add_setattr()

        if (
            hash_ is None
            and auto_detect is True
            and _has_own_attribute(cls, "__hash__")
        ):
            hash = False
        else:
            hash = hash_
        if hash is not True and hash is not False and hash is not None:
            # Can't use `hash in` because 1 == True for example.
            raise TypeError(
                "Invalid value for hash.  Must be True, False, or None."
            )
        elif hash is False or (hash is None and eq is False) or is_exc:
            # Don't do anything. Should fall back to __object__'s __hash__
            # which is by id.
            if cache_hash:
                raise TypeError(
                    "Invalid value for cache_hash.  To use hash caching,"
                    " hashing must be either explicitly or implicitly "
                    "enabled."
                )
        elif hash is True or (
            hash is None and eq is True and is_frozen is True
        ):
            # Build a __hash__ if told so, or if it's safe.
            builder.add_hash()
        else:
            # Raise TypeError on attempts to hash.
            if cache_hash:
                raise TypeError(
                    "Invalid value for cache_hash.  To use hash caching,"
                    " hashing must be either explicitly or implicitly "
                    "enabled."
                )
            builder.make_unhashable()

        if _determine_whether_to_implement(
            cls, init, auto_detect, ("__init__",)
        ):
            builder.add_init()
        else:
            builder.add_attrs_init()
            if cache_hash:
                raise TypeError(
                    "Invalid value for cache_hash.  To use hash caching, init must be True."
                )

        if (
            PY310
            and match_args
            and not _has_own_attribute(cls, "__match_args__")
        ):
            builder.add_match_args()

        return builder.build_class()

    # maybe_cls's type depends on the usage of the decorator.  It's a class
    # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
    if maybe_cls is None:
        return wrap
    else:
        return wrap(maybe_cls)
+
+
# ``attrs`` (the decorator defined above) is shadowed inside several
# functions below that take a parameter named *attrs*; this private alias
# keeps the decorator reachable there.
_attrs = attrs
"""
Internal alias so we can use it in functions that take an argument called
*attrs*.
"""
+
+
def _has_frozen_base_class(cls):
    """
    Detect whether *cls* has a frozen attrs ancestor.

    Frozen attrs classes all install the shared ``_frozen_setattrs`` as
    their ``__setattr__``, so an identity check on that slot suffices.
    """
    return _frozen_setattrs is cls.__setattr__
+
+
+def _generate_unique_filename(cls, func_name):
+ """
+ Create a "filename" suitable for a function being generated.
+ """
+ unique_filename = "<attrs generated {} {}.{}>".format(
+ func_name,
+ cls.__module__,
+ getattr(cls, "__qualname__", cls.__name__),
+ )
+ return unique_filename
+
+
def _make_hash(cls, attrs, frozen, cache_hash):
    """
    Build a ``__hash__`` method for *cls*.

    Only attributes with ``hash=True`` -- or ``hash=None`` while taking
    part in equality -- are mixed into the hash.  If *cache_hash* is true,
    the value is computed once and stored on the instance; for *frozen*
    classes the cache is written through ``object.__setattr__``.
    """
    attrs = tuple(
        a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
    )

    tab = "        "

    unique_filename = _generate_unique_filename(cls, "hash")
    type_hash = hash(unique_filename)
    # If eq is custom generated, we need to include the functions in globs
    globs = {}

    hash_def = "def __hash__(self"
    hash_func = "hash(("
    closing_braces = "))"
    if not cache_hash:
        hash_def += "):"
    else:
        # The cache wrapper is injected as a keyword-only default so the
        # generated method needs no extra global lookup.
        hash_def += ", *"

        hash_def += (
            ", _cache_wrapper="
            + "__import__('attr._make')._make._CacheHashWrapper):"
        )
        hash_func = "_cache_wrapper(" + hash_func
        closing_braces += ")"

    method_lines = [hash_def]

    def append_hash_computation_lines(prefix, indent):
        """
        Generate the code for actually computing the hash code.
        Below this will either be returned directly or used to compute
        a value which is then cached, depending on the value of cache_hash
        """

        method_lines.extend(
            [
                indent + prefix + hash_func,
                indent + "    %d," % (type_hash,),
            ]
        )

        for a in attrs:
            if a.eq_key:
                cmp_name = "_%s_key" % (a.name,)
                globs[cmp_name] = a.eq_key
                method_lines.append(
                    indent + "    %s(self.%s)," % (cmp_name, a.name)
                )
            else:
                method_lines.append(indent + "    self.%s," % a.name)

        method_lines.append(indent + "    " + closing_braces)

    if cache_hash:
        method_lines.append(tab + "if self.%s is None:" % _hash_cache_field)
        if frozen:
            append_hash_computation_lines(
                "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2
            )
            method_lines.append(tab * 2 + ")")  # close __setattr__
        else:
            append_hash_computation_lines(
                "self.%s = " % _hash_cache_field, tab * 2
            )
        method_lines.append(tab + "return self.%s" % _hash_cache_field)
    else:
        append_hash_computation_lines("return ", tab)

    script = "\n".join(method_lines)
    return _make_method("__hash__", script, unique_filename, globs)
+
+
def _add_hash(cls, attrs):
    """
    Attach a generated ``__hash__`` (uncached, non-frozen flavor) to *cls*
    and return the class.
    """
    hash_method = _make_hash(cls, attrs, frozen=False, cache_hash=False)
    cls.__hash__ = hash_method
    return cls
+
+
+def _make_ne():
+ """
+ Create __ne__ method.
+ """
+
+ def __ne__(self, other):
+ """
+ Check equality and either forward a NotImplemented or
+ return the result negated.
+ """
+ result = self.__eq__(other)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return not result
+
+ return __ne__
+
+
def _make_eq(cls, attrs):
    """
    Create __eq__ method for *cls* with *attrs*.

    Only attributes with ``eq`` enabled take part.  Values are compared as
    tuples so that instances containing NaN still compare equal to
    themselves ((nan,) == (nan,) is true although nan == nan is not).
    """
    attrs = [a for a in attrs if a.eq]

    unique_filename = _generate_unique_filename(cls, "eq")
    lines = [
        "def __eq__(self, other):",
        "    if other.__class__ is not self.__class__:",
        "        return NotImplemented",
    ]

    # We can't just do a big self.x = other.x and... clause due to
    # irregularities like nan == nan is false but (nan,) == (nan,) is true.
    globs = {}
    if attrs:
        lines.append("    return (")
        others = ["    ) == ("]
        for a in attrs:
            if a.eq_key:
                cmp_name = "_%s_key" % (a.name,)
                # Add the key function to the global namespace
                # of the evaluated function.
                globs[cmp_name] = a.eq_key
                lines.append(
                    "        %s(self.%s),"
                    % (
                        cmp_name,
                        a.name,
                    )
                )
                others.append(
                    "        %s(other.%s),"
                    % (
                        cmp_name,
                        a.name,
                    )
                )
            else:
                lines.append("        self.%s," % (a.name,))
                others.append("        other.%s," % (a.name,))

        lines += others + ["    )"]
    else:
        lines.append("    return True")

    script = "\n".join(lines)

    return _make_method("__eq__", script, unique_filename, globs)
+
+
+def _make_order(cls, attrs):
+ """
+ Create ordering methods for *cls* with *attrs*.
+ """
+ attrs = [a for a in attrs if a.order]
+
+ def attrs_to_tuple(obj):
+ """
+ Save us some typing.
+ """
+ return tuple(
+ key(value) if key else value
+ for value, key in (
+ (getattr(obj, a.name), a.order_key) for a in attrs
+ )
+ )
+
+ def __lt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) < attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __le__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) <= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __gt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) > attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __ge__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) >= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ return __lt__, __le__, __gt__, __ge__
+
+
def _add_eq(cls, attrs=None):
    """
    Attach generated ``__eq__``/``__ne__`` methods to *cls* and return it.
    """
    if attrs is None:
        attrs = cls.__attrs_attrs__

    cls.__ne__ = _make_ne()
    cls.__eq__ = _make_eq(cls, attrs)
    return cls
+
+
if HAS_F_STRINGS:

    def _make_repr(attrs, ns, cls):
        """
        Make a repr method that includes relevant *attrs*, adding *ns* to
        the full name.  Fast variant: compiles an f-string-based method.
        """
        unique_filename = _generate_unique_filename(cls, "repr")
        # Figure out which attributes to include, and which function to use to
        # format them. The a.repr value can be either bool or a custom
        # callable.
        attr_names_with_reprs = tuple(
            (a.name, (repr if a.repr is True else a.repr), a.init)
            for a in attrs
            if a.repr is not False
        )
        globs = {
            name + "_repr": r
            for name, r, _ in attr_names_with_reprs
            if r != repr
        }
        globs["_compat"] = _compat
        globs["AttributeError"] = AttributeError
        globs["NOTHING"] = NOTHING
        attribute_fragments = []
        for name, r, i in attr_names_with_reprs:
            # Attributes excluded from __init__ may be unset; show NOTHING
            # instead of raising AttributeError in that case.
            accessor = (
                "self." + name
                if i
                else 'getattr(self, "' + name + '", NOTHING)'
            )
            fragment = (
                "%s={%s!r}" % (name, accessor)
                if r == repr
                else "%s={%s_repr(%s)}" % (name, name, accessor)
            )
            attribute_fragments.append(fragment)
        repr_fragment = ", ".join(attribute_fragments)

        if ns is None:
            cls_name_fragment = (
                '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}'
            )
        else:
            cls_name_fragment = ns + ".{self.__class__.__name__}"

        # already_repring guards against infinite recursion on cyclic
        # object graphs by rendering re-entered instances as '...'.
        lines = [
            "def __repr__(self):",
            "  try:",
            "    already_repring = _compat.repr_context.already_repring",
            "  except AttributeError:",
            "    already_repring = {id(self),}",
            "    _compat.repr_context.already_repring = already_repring",
            "  else:",
            "    if id(self) in already_repring:",
            "      return '...'",
            "    else:",
            "      already_repring.add(id(self))",
            "  try:",
            "    return f'%s(%s)'" % (cls_name_fragment, repr_fragment),
            "  finally:",
            "    already_repring.remove(id(self))",
        ]

        return _make_method(
            "__repr__", "\n".join(lines), unique_filename, globs=globs
        )

else:

    def _make_repr(attrs, ns, _):
        """
        Make a repr method that includes relevant *attrs*, adding *ns* to the
        full name.
        """

        # Figure out which attributes to include, and which function to use to
        # format them. The a.repr value can be either bool or a custom
        # callable.
        attr_names_with_reprs = tuple(
            (a.name, repr if a.repr is True else a.repr)
            for a in attrs
            if a.repr is not False
        )

        def __repr__(self):
            """
            Automatically created by attrs.
            """
            try:
                already_repring = _compat.repr_context.already_repring
            except AttributeError:
                already_repring = set()
                _compat.repr_context.already_repring = already_repring

            if id(self) in already_repring:
                return "..."
            real_cls = self.__class__
            if ns is None:
                class_name = real_cls.__qualname__.rsplit(">.", 1)[-1]
            else:
                class_name = ns + "." + real_cls.__name__

            # Since 'self' remains on the stack (i.e.: strongly referenced)
            # for the duration of this call, it's safe to depend on id(...)
            # stability, and not need to track the instance and therefore
            # worry about properties like weakref- or hash-ability.
            already_repring.add(id(self))
            try:
                result = [class_name, "("]
                first = True
                for name, attr_repr in attr_names_with_reprs:
                    if first:
                        first = False
                    else:
                        result.append(", ")
                    result.extend(
                        (name, "=", attr_repr(getattr(self, name, NOTHING)))
                    )
                return "".join(result) + ")"
            finally:
                already_repring.remove(id(self))

        return __repr__
+
+
def _add_repr(cls, ns=None, attrs=None):
    """
    Attach a generated ``__repr__`` to *cls* and return the class.
    """
    cls.__repr__ = _make_repr(
        cls.__attrs_attrs__ if attrs is None else attrs, ns, cls
    )
    return cls
+
+
def fields(cls):
    """
    Return the tuple of ``attrs`` attributes for a class.

    The tuple also allows accessing the fields by their names (see below for
    examples).

    :param type cls: Class to introspect.

    :raise TypeError: If *cls* is not a class.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    :rtype: tuple (with name accessors) of `attrs.Attribute`

    .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
       by name.
    """
    if not isinstance(cls, type):
        raise TypeError("Passed object must be a class.")

    attrs = getattr(cls, "__attrs_attrs__", None)
    if attrs is not None:
        return attrs

    raise NotAnAttrsClassError(
        "{cls!r} is not an attrs-decorated class.".format(cls=cls)
    )
+
+
def fields_dict(cls):
    """
    Return an ordered dictionary of ``attrs`` attributes for a class, whose
    keys are the attribute names.

    :param type cls: Class to introspect.

    :raise TypeError: If *cls* is not a class.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    :rtype: an ordered dict where keys are attribute names and values are
        `attrs.Attribute`\\ s. This will be a `dict` if it's
        naturally ordered like on Python 3.6+ or an
        :class:`~collections.OrderedDict` otherwise.

    .. versionadded:: 18.1.0
    """
    if not isinstance(cls, type):
        raise TypeError("Passed object must be a class.")

    attrs = getattr(cls, "__attrs_attrs__", None)
    if attrs is None:
        raise NotAnAttrsClassError(
            "{cls!r} is not an attrs-decorated class.".format(cls=cls)
        )

    return ordered_dict([(a.name, a) for a in attrs])
+
+
def validate(inst):
    """
    Validate all attributes on *inst* that have a validator.

    Leaves all exceptions through.

    :param inst: Instance of a class with ``attrs`` attributes.
    """
    # Validation can be globally disabled via attr.validators.set_disabled().
    if _config._run_validators is False:
        return

    for a in fields(inst.__class__):
        validator = a.validator
        if validator is None:
            continue
        validator(inst, a, getattr(inst, a.name))
+
+
+def _is_slot_cls(cls):
+ return "__slots__" in cls.__dict__
+
+
+def _is_slot_attr(a_name, base_attr_map):
+ """
+ Check if the attribute name comes from a slot class.
+ """
+ return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name])
+
+
def _make_init(
    cls,
    attrs,
    pre_init,
    post_init,
    frozen,
    slots,
    cache_hash,
    base_attr_map,
    is_exc,
    cls_on_setattr,
    attrs_init,
):
    """
    Build ``__init__`` (or ``__attrs_init__`` if *attrs_init* is true) for
    *cls* by generating source for *attrs* and compiling it.

    Attributes that neither take part in __init__ nor have a default are
    skipped entirely.
    """
    has_cls_on_setattr = (
        cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP
    )

    if frozen and has_cls_on_setattr:
        raise ValueError("Frozen classes can't use on_setattr.")

    # Frozen classes and hash caching must write through object.__setattr__.
    needs_cached_setattr = cache_hash or frozen
    filtered_attrs = []
    attr_dict = {}
    for a in attrs:
        if not a.init and a.default is NOTHING:
            continue

        filtered_attrs.append(a)
        attr_dict[a.name] = a

        if a.on_setattr is not None:
            if frozen is True:
                raise ValueError("Frozen classes can't use on_setattr.")

            needs_cached_setattr = True
        elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP:
            needs_cached_setattr = True

    unique_filename = _generate_unique_filename(cls, "init")

    script, globs, annotations = _attrs_to_init_script(
        filtered_attrs,
        frozen,
        slots,
        pre_init,
        post_init,
        cache_hash,
        base_attr_map,
        is_exc,
        has_cls_on_setattr,
        attrs_init,
    )
    if cls.__module__ in sys.modules:
        # This makes typing.get_type_hints(CLS.__init__) resolve string types.
        globs.update(sys.modules[cls.__module__].__dict__)

    globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})

    if needs_cached_setattr:
        # Save the lookup overhead in __init__ if we need to circumvent
        # setattr hooks.
        globs["_setattr"] = _obj_setattr

    init = _make_method(
        "__attrs_init__" if attrs_init else "__init__",
        script,
        unique_filename,
        globs,
    )
    init.__annotations__ = annotations

    return init
+
+
+def _setattr(attr_name, value_var, has_on_setattr):
+ """
+ Use the cached object.setattr to set *attr_name* to *value_var*.
+ """
+ return "_setattr(self, '%s', %s)" % (attr_name, value_var)
+
+
def _setattr_with_converter(attr_name, value_var, has_on_setattr):
    """
    Emit source that runs the attribute's converter on *value_var* and
    assigns the result via the cached ``object.__setattr__``.
    """
    converter_call = "%s(%s)" % (
        _init_converter_pat % (attr_name,),
        value_var,
    )
    return "_setattr(self, '%s', %s)" % (attr_name, converter_call)
+
+
+def _assign(attr_name, value, has_on_setattr):
+ """
+ Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise
+ relegate to _setattr.
+ """
+ if has_on_setattr:
+ return _setattr(attr_name, value, True)
+
+ return "self.%s = %s" % (attr_name, value)
+
+
def _assign_with_converter(attr_name, value_var, has_on_setattr):
    """
    Like :func:`_assign` but runs the attribute's converter on the value
    first.
    """
    if not has_on_setattr:
        return "self.%s = %s(%s)" % (
            attr_name,
            _init_converter_pat % (attr_name,),
            value_var,
        )

    return _setattr_with_converter(attr_name, value_var, True)
+
+
def _attrs_to_init_script(
    attrs,
    frozen,
    slots,
    pre_init,
    post_init,
    cache_hash,
    base_attr_map,
    is_exc,
    has_cls_on_setattr,
    attrs_init,
):
    """
    Return a script of an initializer for *attrs* and a dict of globals.

    The globals are expected by the generated script.

    If *frozen* is True, we cannot set the attributes directly so we use
    a cached ``object.__setattr__``.
    """
    lines = []
    if pre_init:
        lines.append("self.__attrs_pre_init__()")

    if frozen is True:
        if slots is True:
            fmt_setter = _setattr
            fmt_setter_with_converter = _setattr_with_converter
        else:
            # Dict frozen classes assign directly to __dict__.
            # But only if the attribute doesn't come from an ancestor slot
            # class.
            # Note _inst_dict will be used again below if cache_hash is True
            lines.append("_inst_dict = self.__dict__")

            def fmt_setter(attr_name, value_var, has_on_setattr):
                if _is_slot_attr(attr_name, base_attr_map):
                    return _setattr(attr_name, value_var, has_on_setattr)

                return "_inst_dict['%s'] = %s" % (attr_name, value_var)

            def fmt_setter_with_converter(
                attr_name, value_var, has_on_setattr
            ):
                if has_on_setattr or _is_slot_attr(attr_name, base_attr_map):
                    return _setattr_with_converter(
                        attr_name, value_var, has_on_setattr
                    )

                return "_inst_dict['%s'] = %s(%s)" % (
                    attr_name,
                    _init_converter_pat % (attr_name,),
                    value_var,
                )

    else:
        # Not frozen.
        fmt_setter = _assign
        fmt_setter_with_converter = _assign_with_converter

    args = []
    kw_only_args = []
    attrs_to_validate = []

    # This is a dictionary of names to validator and converter callables.
    # Injecting this into __init__ globals lets us avoid lookups.
    names_for_globals = {}
    annotations = {"return": None}

    for a in attrs:
        if a.validator:
            attrs_to_validate.append(a)

        attr_name = a.name
        has_on_setattr = a.on_setattr is not None or (
            a.on_setattr is not setters.NO_OP and has_cls_on_setattr
        )
        # Private attributes lose their leading underscore(s) in the
        # __init__ signature.
        arg_name = a.name.lstrip("_")

        has_factory = isinstance(a.default, Factory)
        if has_factory and a.default.takes_self:
            maybe_self = "self"
        else:
            maybe_self = ""

        # init=False attributes are still assigned their default/factory;
        # they just never appear in the signature.
        if a.init is False:
            if has_factory:
                init_factory_name = _init_factory_pat.format(a.name)
                if a.converter is not None:
                    lines.append(
                        fmt_setter_with_converter(
                            attr_name,
                            init_factory_name + "(%s)" % (maybe_self,),
                            has_on_setattr,
                        )
                    )
                    conv_name = _init_converter_pat % (a.name,)
                    names_for_globals[conv_name] = a.converter
                else:
                    lines.append(
                        fmt_setter(
                            attr_name,
                            init_factory_name + "(%s)" % (maybe_self,),
                            has_on_setattr,
                        )
                    )
                names_for_globals[init_factory_name] = a.default.factory
            else:
                if a.converter is not None:
                    lines.append(
                        fmt_setter_with_converter(
                            attr_name,
                            "attr_dict['%s'].default" % (attr_name,),
                            has_on_setattr,
                        )
                    )
                    conv_name = _init_converter_pat % (a.name,)
                    names_for_globals[conv_name] = a.converter
                else:
                    lines.append(
                        fmt_setter(
                            attr_name,
                            "attr_dict['%s'].default" % (attr_name,),
                            has_on_setattr,
                        )
                    )
        elif a.default is not NOTHING and not has_factory:
            # Plain default: expose it via attr_dict to keep the script
            # stable across default values.
            arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name)
            if a.kw_only:
                kw_only_args.append(arg)
            else:
                args.append(arg)

            if a.converter is not None:
                lines.append(
                    fmt_setter_with_converter(
                        attr_name, arg_name, has_on_setattr
                    )
                )
                names_for_globals[
                    _init_converter_pat % (a.name,)
                ] = a.converter
            else:
                lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))

        elif has_factory:
            # Factory default: NOTHING sentinel marks "argument not given",
            # in which case the factory is called at runtime.
            arg = "%s=NOTHING" % (arg_name,)
            if a.kw_only:
                kw_only_args.append(arg)
            else:
                args.append(arg)
            lines.append("if %s is not NOTHING:" % (arg_name,))

            init_factory_name = _init_factory_pat.format(a.name)
            if a.converter is not None:
                lines.append(
                    "    "
                    + fmt_setter_with_converter(
                        attr_name, arg_name, has_on_setattr
                    )
                )
                lines.append("else:")
                lines.append(
                    "    "
                    + fmt_setter_with_converter(
                        attr_name,
                        init_factory_name + "(" + maybe_self + ")",
                        has_on_setattr,
                    )
                )
                names_for_globals[
                    _init_converter_pat % (a.name,)
                ] = a.converter
            else:
                lines.append(
                    "    " + fmt_setter(attr_name, arg_name, has_on_setattr)
                )
                lines.append("else:")
                lines.append(
                    "    "
                    + fmt_setter(
                        attr_name,
                        init_factory_name + "(" + maybe_self + ")",
                        has_on_setattr,
                    )
                )
            names_for_globals[init_factory_name] = a.default.factory
        else:
            # Mandatory argument without default.
            if a.kw_only:
                kw_only_args.append(arg_name)
            else:
                args.append(arg_name)

            if a.converter is not None:
                lines.append(
                    fmt_setter_with_converter(
                        attr_name, arg_name, has_on_setattr
                    )
                )
                names_for_globals[
                    _init_converter_pat % (a.name,)
                ] = a.converter
            else:
                lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))

        if a.init is True:
            if a.type is not None and a.converter is None:
                annotations[arg_name] = a.type
            elif a.converter is not None:
                # Try to get the type from the converter.
                t = _AnnotationExtractor(a.converter).get_first_param_type()
                if t:
                    annotations[arg_name] = t

    if attrs_to_validate:  # we can skip this if there are no validators.
        names_for_globals["_config"] = _config
        lines.append("if _config._run_validators is True:")
        for a in attrs_to_validate:
            val_name = "__attr_validator_" + a.name
            attr_name = "__attr_" + a.name
            lines.append(
                "    %s(self, %s, self.%s)" % (val_name, attr_name, a.name)
            )
            names_for_globals[val_name] = a.validator
            names_for_globals[attr_name] = a

    if post_init:
        lines.append("self.__attrs_post_init__()")

    # because this is set only after __attrs_post_init__ is called, a crash
    # will result if post-init tries to access the hash code. This seemed
    # preferable to setting this beforehand, in which case alteration to
    # field values during post-init combined with post-init accessing the
    # hash code would result in silent bugs.
    if cache_hash:
        if frozen:
            if slots:
                # if frozen and slots, then _setattr defined above
                init_hash_cache = "_setattr(self, '%s', %s)"
            else:
                # if frozen and not slots, then _inst_dict defined above
                init_hash_cache = "_inst_dict['%s'] = %s"
        else:
            init_hash_cache = "self.%s = %s"
        lines.append(init_hash_cache % (_hash_cache_field, "None"))

    # For exceptions we rely on BaseException.__init__ for proper
    # initialization.
    if is_exc:
        vals = ",".join("self." + a.name for a in attrs if a.init)

        lines.append("BaseException.__init__(self, %s)" % (vals,))

    args = ", ".join(args)
    if kw_only_args:
        args += "%s*, %s" % (
            ", " if args else "",  # leading comma
            ", ".join(kw_only_args),  # kw_only args
        )
    return (
        """\
def {init_name}(self, {args}):
    {lines}
""".format(
            init_name=("__attrs_init__" if attrs_init else "__init__"),
            args=args,
            lines="\n    ".join(lines) if lines else "pass",
        ),
        names_for_globals,
        annotations,
    )
+
+
class Attribute:
    """
    *Read-only* representation of an attribute.

    The class has *all* arguments of `attr.ib` (except for ``factory``
    which is only syntactic sugar for ``default=Factory(...)`` plus the
    following:

    - ``name`` (`str`): The name of the attribute.
    - ``inherited`` (`bool`): Whether or not that attribute has been inherited
      from a base class.
    - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The callables
      that are used for comparing and ordering objects by this attribute,
      respectively. These are set by passing a callable to `attr.ib`'s ``eq``,
      ``order``, or ``cmp`` arguments. See also :ref:`comparison customization
      <custom-comparison>`.

    Instances of this class are frequently used for introspection purposes
    like:

    - `fields` returns a tuple of them.
    - Validators get them passed as the first argument.
    - The :ref:`field transformer <transform-fields>` hook receives a list of
      them.

    .. versionadded:: 20.1.0 *inherited*
    .. versionadded:: 20.1.0 *on_setattr*
    .. versionchanged:: 20.2.0 *inherited* is not taken into account for
        equality checks and hashing anymore.
    .. versionadded:: 21.1.0 *eq_key* and *order_key*

    For the full version history of the fields, see `attr.ib`.
    """

    __slots__ = (
        "name",
        "default",
        "validator",
        "repr",
        "eq",
        "eq_key",
        "order",
        "order_key",
        "hash",
        "init",
        "metadata",
        "type",
        "converter",
        "kw_only",
        "inherited",
        "on_setattr",
    )

    def __init__(
        self,
        name,
        default,
        validator,
        repr,
        cmp,  # XXX: unused, remove along with other cmp code.
        hash,
        init,
        inherited,
        metadata=None,
        type=None,
        converter=None,
        kw_only=False,
        eq=None,
        eq_key=None,
        order=None,
        order_key=None,
        on_setattr=None,
    ):
        eq, eq_key, order, order_key = _determine_attrib_eq_order(
            cmp, eq_key or eq, order_key or order, True
        )

        # Cache this descriptor here to speed things up later.
        bound_setattr = _obj_setattr.__get__(self, Attribute)

        # Despite the big red warning, people *do* instantiate `Attribute`
        # themselves.
        bound_setattr("name", name)
        bound_setattr("default", default)
        bound_setattr("validator", validator)
        bound_setattr("repr", repr)
        bound_setattr("eq", eq)
        bound_setattr("eq_key", eq_key)
        bound_setattr("order", order)
        bound_setattr("order_key", order_key)
        bound_setattr("hash", hash)
        bound_setattr("init", init)
        bound_setattr("converter", converter)
        bound_setattr(
            "metadata",
            (
                types.MappingProxyType(dict(metadata))  # Shallow copy
                if metadata
                else _empty_metadata_singleton
            ),
        )
        bound_setattr("type", type)
        bound_setattr("kw_only", kw_only)
        bound_setattr("inherited", inherited)
        bound_setattr("on_setattr", on_setattr)

    def __setattr__(self, name, value):
        # Attribute instances are immutable from the outside; internal code
        # writes through _obj_setattr instead.
        raise FrozenInstanceError()

    @classmethod
    def from_counting_attr(cls, name, ca, type=None):
        """
        Build an `Attribute` from the intermediate `_CountingAttr` *ca*.
        """
        # type holds the annotated value. deal with conflicts:
        if type is None:
            type = ca.type
        elif ca.type is not None:
            raise ValueError(
                "Type annotation and type argument cannot both be present"
            )
        inst_dict = {
            k: getattr(ca, k)
            for k in Attribute.__slots__
            if k
            not in (
                "name",
                "validator",
                "default",
                "type",
                "inherited",
            )  # exclude methods and deprecated alias
        }
        return cls(
            name=name,
            validator=ca._validator,
            default=ca._default,
            type=type,
            cmp=None,
            inherited=False,
            **inst_dict
        )

    # Don't use attr.evolve since fields(Attribute) doesn't work
    def evolve(self, **changes):
        """
        Copy *self* and apply *changes*.

        This works similarly to `attr.evolve` but that function does not work
        with ``Attribute``.

        It is mainly meant to be used for `transform-fields`.

        .. versionadded:: 20.3.0
        """
        new = copy.copy(self)

        new._setattrs(changes.items())

        return new

    # Don't use _add_pickle since fields(Attribute) doesn't work
    def __getstate__(self):
        """
        Play nice with pickle.
        """
        # metadata is a MappingProxyType, which doesn't pickle; ship a
        # plain dict instead.
        return tuple(
            getattr(self, name) if name != "metadata" else dict(self.metadata)
            for name in self.__slots__
        )

    def __setstate__(self, state):
        """
        Play nice with pickle.
        """
        self._setattrs(zip(self.__slots__, state))

    def _setattrs(self, name_values_pairs):
        # Bypass the frozen __setattr__ via the cached object.__setattr__.
        bound_setattr = _obj_setattr.__get__(self, Attribute)
        for name, value in name_values_pairs:
            if name != "metadata":
                bound_setattr(name, value)
            else:
                bound_setattr(
                    name,
                    types.MappingProxyType(dict(value))
                    if value
                    else _empty_metadata_singleton,
                )
+
+
# Bootstrap: hand-build Attribute instances that describe Attribute's own
# slots so the generated-method machinery below can be applied to
# Attribute itself.
_a = [
    Attribute(
        name=name,
        default=NOTHING,
        validator=None,
        repr=True,
        cmp=None,
        eq=True,
        order=False,
        hash=(name != "metadata"),
        init=True,
        inherited=False,
    )
    for name in Attribute.__slots__
]

# Generate __repr__/__eq__/__ne__/__hash__ for Attribute; "inherited" is
# deliberately excluded from equality and hashing (see class docstring).
Attribute = _add_hash(
    _add_eq(
        _add_repr(Attribute, attrs=_a),
        attrs=[a for a in _a if a.name != "inherited"],
    ),
    attrs=[a for a in _a if a.hash and a.name != "inherited"],
)
+
+
class _CountingAttr:
    """
    Intermediate representation of attributes that uses a counter to preserve
    the order in which the attributes have been defined.

    *Internal* data structure of the attrs library. Running into is most
    likely the result of a bug like a forgotten `@attr.s` decorator.
    """

    __slots__ = (
        "counter",
        "_default",
        "repr",
        "eq",
        "eq_key",
        "order",
        "order_key",
        "hash",
        "init",
        "metadata",
        "_validator",
        "converter",
        "type",
        "kw_only",
        "on_setattr",
    )
    # Hand-built field descriptions so this class gets working generated
    # __repr__/__eq__ below without going through the decorator itself.
    __attrs_attrs__ = tuple(
        Attribute(
            name=name,
            default=NOTHING,
            validator=None,
            repr=True,
            cmp=None,
            hash=True,
            init=True,
            kw_only=False,
            eq=True,
            eq_key=None,
            order=False,
            order_key=None,
            inherited=False,
            on_setattr=None,
        )
        for name in (
            "counter",
            "_default",
            "repr",
            "eq",
            "order",
            "hash",
            "init",
            "on_setattr",
        )
    ) + (
        Attribute(
            name="metadata",
            default=None,
            validator=None,
            repr=True,
            cmp=None,
            hash=False,
            init=True,
            kw_only=False,
            eq=True,
            eq_key=None,
            order=False,
            order_key=None,
            inherited=False,
            on_setattr=None,
        ),
    )
    # Class-wide monotonically increasing counter that ranks fields by
    # definition order.
    cls_counter = 0

    def __init__(
        self,
        default,
        validator,
        repr,
        cmp,
        hash,
        init,
        converter,
        metadata,
        type,
        kw_only,
        eq,
        eq_key,
        order,
        order_key,
        on_setattr,
    ):
        _CountingAttr.cls_counter += 1
        self.counter = _CountingAttr.cls_counter
        self._default = default
        self._validator = validator
        self.converter = converter
        self.repr = repr
        self.eq = eq
        self.eq_key = eq_key
        self.order = order
        self.order_key = order_key
        self.hash = hash
        self.init = init
        self.metadata = metadata
        self.type = type
        self.kw_only = kw_only
        self.on_setattr = on_setattr

    def validator(self, meth):
        """
        Decorator that adds *meth* to the list of validators.

        Returns *meth* unchanged.

        .. versionadded:: 17.1.0
        """
        # Multiple validator decorators are composed with and_().
        if self._validator is None:
            self._validator = meth
        else:
            self._validator = and_(self._validator, meth)
        return meth

    def default(self, meth):
        """
        Decorator that allows to set the default for an attribute.

        Returns *meth* unchanged.

        :raises DefaultAlreadySetError: If default has been set before.

        .. versionadded:: 17.1.0
        """
        if self._default is not NOTHING:
            raise DefaultAlreadySetError()

        # The decorated method receives the partially initialized instance.
        self._default = Factory(meth, takes_self=True)

        return meth
+
+
# Equip _CountingAttr itself with generated __repr__/__eq__/__ne__.
_CountingAttr = _add_eq(_add_repr(_CountingAttr))
+
+
class Factory:
    """
    Stores a factory callable.

    If passed as the default value to `attrs.field`, the factory is used to
    generate a new value.

    :param callable factory: A callable that takes either none or exactly one
        mandatory positional argument depending on *takes_self*.
    :param bool takes_self: Pass the partially initialized instance that is
        being initialized as a positional argument.

    .. versionadded:: 17.1.0 *takes_self*
    """

    __slots__ = ("factory", "takes_self")

    def __init__(self, factory, takes_self=False):
        """
        `Factory` is part of the default machinery so if we want a default
        value here, we have to implement it ourselves.
        """
        self.factory = factory
        self.takes_self = takes_self

    def __getstate__(self):
        """
        Play nice with pickle.
        """
        # Slotted classes have no __dict__; serialize the slots by hand.
        return tuple(getattr(self, name) for name in self.__slots__)

    def __setstate__(self, state):
        """
        Play nice with pickle.
        """
        for name, value in zip(self.__slots__, state):
            setattr(self, name, value)
+
+
# Bootstrap: describe Factory's own slots so it can receive generated
# __repr__/__eq__/__ne__/__hash__ methods below.
_f = [
    Attribute(
        name=name,
        default=NOTHING,
        validator=None,
        repr=True,
        cmp=None,
        eq=True,
        order=False,
        hash=True,
        init=True,
        inherited=False,
    )
    for name in Factory.__slots__
]

Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f)
+
+
def make_class(name, attrs, bases=(object,), **attributes_arguments):
    """
    A quick way to create a new class called *name* with *attrs*.

    :param str name: The name for the new class.

    :param attrs: A list of names or a dictionary of mappings of names to
        attributes.

        If *attrs* is a list or an ordered dict (`dict` on Python 3.6+,
        `collections.OrderedDict` otherwise), the order is deduced from
        the order of the names or attributes inside *attrs*. Otherwise the
        order of the definition of the attributes is used.
    :type attrs: `list` or `dict`

    :param tuple bases: Classes that the new class will subclass.

    :param attributes_arguments: Passed unmodified to `attr.s`.

    :return: A new class with *attrs*.
    :rtype: type

    .. versionadded:: 17.1.0 *bases*
    .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
    """
    if isinstance(attrs, dict):
        cls_dict = attrs
    elif isinstance(attrs, (list, tuple)):
        cls_dict = {a: attrib() for a in attrs}
    else:
        raise TypeError("attrs argument must be a dict or a list.")

    # These dunders must land on the class body itself instead of being
    # collected as attributes.
    pre_init = cls_dict.pop("__attrs_pre_init__", None)
    post_init = cls_dict.pop("__attrs_post_init__", None)
    user_init = cls_dict.pop("__init__", None)

    body = {}
    if pre_init is not None:
        body["__attrs_pre_init__"] = pre_init
    if post_init is not None:
        body["__attrs_post_init__"] = post_init
    if user_init is not None:
        body["__init__"] = user_init

    type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body))

    # For pickling to work, the __module__ variable needs to be set to the
    # frame where the class is created. Bypass this step in environments where
    # sys._getframe is not defined (Jython for example) or sys._getframe is not
    # defined for arguments greater than 0 (IronPython).
    try:
        type_.__module__ = sys._getframe(1).f_globals.get(
            "__name__", "__main__"
        )
    except (AttributeError, ValueError):
        pass

    # We do it here for proper warnings with meaningful stacklevel.
    cmp = attributes_arguments.pop("cmp", None)
    (
        attributes_arguments["eq"],
        attributes_arguments["order"],
    ) = _determine_attrs_eq_order(
        cmp,
        attributes_arguments.get("eq"),
        attributes_arguments.get("order"),
        True,
    )

    return _attrs(these=cls_dict, **attributes_arguments)(type_)
+
+
+# These are required by within this module so we define them here and merely
+# import into .validators / .converters.
+
+
@attrs(slots=True, hash=True)
class _AndValidator:
    """
    Compose many validators to a single one.
    """

    # Tuple of the wrapped validator callables, run in order.
    _validators = attrib()

    def __call__(self, inst, attr, value):
        for v in self._validators:
            v(inst, attr, value)
+
+
def and_(*validators):
    """
    A validator that composes multiple validators into one.

    When called on a value, it runs all wrapped validators.

    Nested ``and_`` compositions are flattened into a single level.

    :param callables validators: Arbitrary number of validators.

    .. versionadded:: 17.1.0
    """
    flattened = []
    for v in validators:
        if isinstance(v, _AndValidator):
            flattened.extend(v._validators)
        else:
            flattened.append(v)

    return _AndValidator(tuple(flattened))
+
+
def pipe(*converters):
    """
    A converter that composes multiple converters into one.

    When called on a value, it runs all wrapped converters, returning the
    *last* value.

    Type annotations will be inferred from the wrapped converters', if
    they have any.

    :param callables converters: Arbitrary number of converters.

    .. versionadded:: 20.1.0
    """

    def pipe_converter(val):
        for c in converters:
            val = c(val)
        return val

    if converters:
        # Take the parameter type from the first converter ...
        t = _AnnotationExtractor(converters[0]).get_first_param_type()
        if t:
            pipe_converter.__annotations__["val"] = t

        # ... and the return type from the last one.
        rt = _AnnotationExtractor(converters[-1]).get_return_type()
        if rt:
            pipe_converter.__annotations__["return"] = rt
    else:
        # With nothing to compose, the pipe is the identity function.
        A = typing.TypeVar("A")
        pipe_converter.__annotations__ = {"val": A, "return": A}

    return pipe_converter
diff --git a/libs/attr/_next_gen.py b/libs/attr/_next_gen.py
new file mode 100644
index 000000000..5a06a7438
--- /dev/null
+++ b/libs/attr/_next_gen.py
@@ -0,0 +1,220 @@
+# SPDX-License-Identifier: MIT
+
+"""
+These are Python 3.6+-only and keyword-only APIs that call `attr.s` and
+`attr.ib` with different default values.
+"""
+
+
+from functools import partial
+
+from . import setters
+from ._funcs import asdict as _asdict
+from ._funcs import astuple as _astuple
+from ._make import (
+ NOTHING,
+ _frozen_setattrs,
+ _ng_default_on_setattr,
+ attrib,
+ attrs,
+)
+from .exceptions import UnannotatedAttributeError
+
+
+def define(
+ maybe_cls=None,
+ *,
+ these=None,
+ repr=None,
+ hash=None,
+ init=None,
+ slots=True,
+ frozen=False,
+ weakref_slot=True,
+ str=False,
+ auto_attribs=None,
+ kw_only=False,
+ cache_hash=False,
+ auto_exc=True,
+ eq=None,
+ order=False,
+ auto_detect=True,
+ getstate_setstate=None,
+ on_setattr=None,
+ field_transformer=None,
+ match_args=True,
+):
+ r"""
+ Define an ``attrs`` class.
+
+ Differences to the classic `attr.s` that it uses underneath:
+
+ - Automatically detect whether or not *auto_attribs* should be `True` (c.f.
+ *auto_attribs* parameter).
+ - If *frozen* is `False`, run converters and validators when setting an
+ attribute by default.
+ - *slots=True*
+
+ .. caution::
+
+ Usually this has only upsides and few visible effects in everyday
+    programming. But it *can* lead to some surprising behaviors, so please
+ make sure to read :term:`slotted classes`.
+ - *auto_exc=True*
+ - *auto_detect=True*
+ - *order=False*
+ - Some options that were only relevant on Python 2 or were kept around for
+ backwards-compatibility have been removed.
+
+ Please note that these are all defaults and you can change them as you
+ wish.
+
+ :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves
+ exactly like `attr.s`. If left `None`, `attr.s` will try to guess:
+
+ 1. If any attributes are annotated and no unannotated `attrs.fields`\ s
+ are found, it assumes *auto_attribs=True*.
+ 2. Otherwise it assumes *auto_attribs=False* and tries to collect
+ `attrs.fields`\ s.
+
+ For now, please refer to `attr.s` for the rest of the parameters.
+
+ .. versionadded:: 20.1.0
+ .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
+ """
+
+ def do_it(cls, auto_attribs):
+ return attrs(
+ maybe_cls=cls,
+ these=these,
+ repr=repr,
+ hash=hash,
+ init=init,
+ slots=slots,
+ frozen=frozen,
+ weakref_slot=weakref_slot,
+ str=str,
+ auto_attribs=auto_attribs,
+ kw_only=kw_only,
+ cache_hash=cache_hash,
+ auto_exc=auto_exc,
+ eq=eq,
+ order=order,
+ auto_detect=auto_detect,
+ collect_by_mro=True,
+ getstate_setstate=getstate_setstate,
+ on_setattr=on_setattr,
+ field_transformer=field_transformer,
+ match_args=match_args,
+ )
+
+ def wrap(cls):
+ """
+ Making this a wrapper ensures this code runs during class creation.
+
+ We also ensure that frozen-ness of classes is inherited.
+ """
+ nonlocal frozen, on_setattr
+
+ had_on_setattr = on_setattr not in (None, setters.NO_OP)
+
+ # By default, mutable classes convert & validate on setattr.
+ if frozen is False and on_setattr is None:
+ on_setattr = _ng_default_on_setattr
+
+ # However, if we subclass a frozen class, we inherit the immutability
+ # and disable on_setattr.
+ for base_cls in cls.__bases__:
+ if base_cls.__setattr__ is _frozen_setattrs:
+ if had_on_setattr:
+ raise ValueError(
+ "Frozen classes can't use on_setattr "
+ "(frozen-ness was inherited)."
+ )
+
+ on_setattr = setters.NO_OP
+ break
+
+ if auto_attribs is not None:
+ return do_it(cls, auto_attribs)
+
+ try:
+ return do_it(cls, True)
+ except UnannotatedAttributeError:
+ return do_it(cls, False)
+
+ # maybe_cls's type depends on the usage of the decorator. It's a class
+ # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
+ if maybe_cls is None:
+ return wrap
+ else:
+ return wrap(maybe_cls)
+
+
+mutable = define
+frozen = partial(define, frozen=True, on_setattr=None)
+
+
+def field(
+ *,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ hash=None,
+ init=True,
+ metadata=None,
+ converter=None,
+ factory=None,
+ kw_only=False,
+ eq=None,
+ order=None,
+ on_setattr=None,
+):
+ """
+ Identical to `attr.ib`, except keyword-only and with some arguments
+ removed.
+
+ .. versionadded:: 20.1.0
+ """
+ return attrib(
+ default=default,
+ validator=validator,
+ repr=repr,
+ hash=hash,
+ init=init,
+ metadata=metadata,
+ converter=converter,
+ factory=factory,
+ kw_only=kw_only,
+ eq=eq,
+ order=order,
+ on_setattr=on_setattr,
+ )
+
+
+def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
+ """
+ Same as `attr.asdict`, except that collections types are always retained
+ and dict is always used as *dict_factory*.
+
+ .. versionadded:: 21.3.0
+ """
+ return _asdict(
+ inst=inst,
+ recurse=recurse,
+ filter=filter,
+ value_serializer=value_serializer,
+ retain_collection_types=True,
+ )
+
+
+def astuple(inst, *, recurse=True, filter=None):
+ """
+ Same as `attr.astuple`, except that collections types are always retained
+ and `tuple` is always used as the *tuple_factory*.
+
+ .. versionadded:: 21.3.0
+ """
+ return _astuple(
+ inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
+ )
diff --git a/libs/attr/_version_info.py b/libs/attr/_version_info.py
new file mode 100644
index 000000000..51a1312f9
--- /dev/null
+++ b/libs/attr/_version_info.py
@@ -0,0 +1,86 @@
+# SPDX-License-Identifier: MIT
+
+
+from functools import total_ordering
+
+from ._funcs import astuple
+from ._make import attrib, attrs
+
+
+@total_ordering
+@attrs(eq=False, order=False, slots=True, frozen=True)
+class VersionInfo:
+ """
+ A version object that can be compared to tuple of length 1--4:
+
+ >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
+ True
+ >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
+ True
+ >>> vi = attr.VersionInfo(19, 2, 0, "final")
+ >>> vi < (19, 1, 1)
+ False
+ >>> vi < (19,)
+ False
+ >>> vi == (19, 2,)
+ True
+ >>> vi == (19, 2, 1)
+ False
+
+ .. versionadded:: 19.2
+ """
+
+ year = attrib(type=int)
+ minor = attrib(type=int)
+ micro = attrib(type=int)
+ releaselevel = attrib(type=str)
+
+ @classmethod
+ def _from_version_string(cls, s):
+ """
+ Parse *s* and return a _VersionInfo.
+ """
+ v = s.split(".")
+ if len(v) == 3:
+ v.append("final")
+
+ return cls(
+ year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
+ )
+
+ def _ensure_tuple(self, other):
+ """
+ Ensure *other* is a tuple of a valid length.
+
+ Returns a possibly transformed *other* and ourselves as a tuple of
+ the same length as *other*.
+ """
+
+ if self.__class__ is other.__class__:
+ other = astuple(other)
+
+ if not isinstance(other, tuple):
+ raise NotImplementedError
+
+ if not (1 <= len(other) <= 4):
+ raise NotImplementedError
+
+ return astuple(self)[: len(other)], other
+
+ def __eq__(self, other):
+ try:
+ us, them = self._ensure_tuple(other)
+ except NotImplementedError:
+ return NotImplemented
+
+ return us == them
+
+ def __lt__(self, other):
+ try:
+ us, them = self._ensure_tuple(other)
+ except NotImplementedError:
+ return NotImplemented
+
+ # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
+ # have to do anything special with releaselevel for now.
+ return us < them
diff --git a/libs/attr/_version_info.pyi b/libs/attr/_version_info.pyi
new file mode 100644
index 000000000..45ced0863
--- /dev/null
+++ b/libs/attr/_version_info.pyi
@@ -0,0 +1,9 @@
+class VersionInfo:
+ @property
+ def year(self) -> int: ...
+ @property
+ def minor(self) -> int: ...
+ @property
+ def micro(self) -> int: ...
+ @property
+ def releaselevel(self) -> str: ...
diff --git a/libs/attr/converters.py b/libs/attr/converters.py
new file mode 100644
index 000000000..a73626c26
--- /dev/null
+++ b/libs/attr/converters.py
@@ -0,0 +1,144 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful converters.
+"""
+
+
+import typing
+
+from ._compat import _AnnotationExtractor
+from ._make import NOTHING, Factory, pipe
+
+
+__all__ = [
+ "default_if_none",
+ "optional",
+ "pipe",
+ "to_bool",
+]
+
+
+def optional(converter):
+ """
+ A converter that allows an attribute to be optional. An optional attribute
+ is one which can be set to ``None``.
+
+    Type annotations will be inferred from the wrapped converter, if
+ has any.
+
+ :param callable converter: the converter that is used for non-``None``
+ values.
+
+ .. versionadded:: 17.1.0
+ """
+
+ def optional_converter(val):
+ if val is None:
+ return None
+ return converter(val)
+
+ xtr = _AnnotationExtractor(converter)
+
+ t = xtr.get_first_param_type()
+ if t:
+ optional_converter.__annotations__["val"] = typing.Optional[t]
+
+ rt = xtr.get_return_type()
+ if rt:
+ optional_converter.__annotations__["return"] = typing.Optional[rt]
+
+ return optional_converter
+
+
+def default_if_none(default=NOTHING, factory=None):
+ """
+    A converter that allows replacing ``None`` values with *default* or the
+ result of *factory*.
+
+ :param default: Value to be used if ``None`` is passed. Passing an instance
+ of `attrs.Factory` is supported, however the ``takes_self`` option
+ is *not*.
+ :param callable factory: A callable that takes no parameters whose result
+ is used if ``None`` is passed.
+
+ :raises TypeError: If **neither** *default* or *factory* is passed.
+ :raises TypeError: If **both** *default* and *factory* are passed.
+ :raises ValueError: If an instance of `attrs.Factory` is passed with
+ ``takes_self=True``.
+
+ .. versionadded:: 18.2.0
+ """
+ if default is NOTHING and factory is None:
+ raise TypeError("Must pass either `default` or `factory`.")
+
+ if default is not NOTHING and factory is not None:
+ raise TypeError(
+ "Must pass either `default` or `factory` but not both."
+ )
+
+ if factory is not None:
+ default = Factory(factory)
+
+ if isinstance(default, Factory):
+ if default.takes_self:
+ raise ValueError(
+ "`takes_self` is not supported by default_if_none."
+ )
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default.factory()
+
+ else:
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default
+
+ return default_if_none_converter
+
+
+def to_bool(val):
+ """
+ Convert "boolean" strings (e.g., from env. vars.) to real booleans.
+
+ Values mapping to :code:`True`:
+
+ - :code:`True`
+ - :code:`"true"` / :code:`"t"`
+ - :code:`"yes"` / :code:`"y"`
+ - :code:`"on"`
+ - :code:`"1"`
+ - :code:`1`
+
+ Values mapping to :code:`False`:
+
+ - :code:`False`
+ - :code:`"false"` / :code:`"f"`
+ - :code:`"no"` / :code:`"n"`
+ - :code:`"off"`
+ - :code:`"0"`
+ - :code:`0`
+
+ :raises ValueError: for any other value.
+
+ .. versionadded:: 21.3.0
+ """
+ if isinstance(val, str):
+ val = val.lower()
+ truthy = {True, "true", "t", "yes", "y", "on", "1", 1}
+ falsy = {False, "false", "f", "no", "n", "off", "0", 0}
+ try:
+ if val in truthy:
+ return True
+ if val in falsy:
+ return False
+ except TypeError:
+ # Raised when "val" is not hashable (e.g., lists)
+ pass
+ raise ValueError("Cannot convert value to bool: {}".format(val))
diff --git a/libs/attr/converters.pyi b/libs/attr/converters.pyi
new file mode 100644
index 000000000..0f58088a3
--- /dev/null
+++ b/libs/attr/converters.pyi
@@ -0,0 +1,13 @@
+from typing import Callable, Optional, TypeVar, overload
+
+from . import _ConverterType
+
+_T = TypeVar("_T")
+
+def pipe(*validators: _ConverterType) -> _ConverterType: ...
+def optional(converter: _ConverterType) -> _ConverterType: ...
+@overload
+def default_if_none(default: _T) -> _ConverterType: ...
+@overload
+def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
+def to_bool(val: str) -> bool: ...
diff --git a/libs/attr/exceptions.py b/libs/attr/exceptions.py
new file mode 100644
index 000000000..5dc51e0a8
--- /dev/null
+++ b/libs/attr/exceptions.py
@@ -0,0 +1,92 @@
+# SPDX-License-Identifier: MIT
+
+
+class FrozenError(AttributeError):
+ """
+    A frozen/immutable instance or attribute has been attempted to be
+ modified.
+
+ It mirrors the behavior of ``namedtuples`` by using the same error message
+ and subclassing `AttributeError`.
+
+ .. versionadded:: 20.1.0
+ """
+
+ msg = "can't set attribute"
+ args = [msg]
+
+
+class FrozenInstanceError(FrozenError):
+ """
+ A frozen instance has been attempted to be modified.
+
+ .. versionadded:: 16.1.0
+ """
+
+
+class FrozenAttributeError(FrozenError):
+ """
+ A frozen attribute has been attempted to be modified.
+
+ .. versionadded:: 20.1.0
+ """
+
+
+class AttrsAttributeNotFoundError(ValueError):
+ """
+ An ``attrs`` function couldn't find an attribute that the user asked for.
+
+ .. versionadded:: 16.2.0
+ """
+
+
+class NotAnAttrsClassError(ValueError):
+ """
+ A non-``attrs`` class has been passed into an ``attrs`` function.
+
+ .. versionadded:: 16.2.0
+ """
+
+
+class DefaultAlreadySetError(RuntimeError):
+ """
+ A default has been set using ``attr.ib()`` and is attempted to be reset
+ using the decorator.
+
+ .. versionadded:: 17.1.0
+ """
+
+
+class UnannotatedAttributeError(RuntimeError):
+ """
+ A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
+ annotation.
+
+ .. versionadded:: 17.3.0
+ """
+
+
+class PythonTooOldError(RuntimeError):
+ """
+ It was attempted to use an ``attrs`` feature that requires a newer Python
+ version.
+
+ .. versionadded:: 18.2.0
+ """
+
+
+class NotCallableError(TypeError):
+ """
+    An ``attr.ib()`` requiring a callable has been set with a value
+ that is not callable.
+
+ .. versionadded:: 19.2.0
+ """
+
+ def __init__(self, msg, value):
+ super(TypeError, self).__init__(msg, value)
+ self.msg = msg
+ self.value = value
+
+ def __str__(self):
+ return str(self.msg)
diff --git a/libs/attr/exceptions.pyi b/libs/attr/exceptions.pyi
new file mode 100644
index 000000000..f2680118b
--- /dev/null
+++ b/libs/attr/exceptions.pyi
@@ -0,0 +1,17 @@
+from typing import Any
+
+class FrozenError(AttributeError):
+ msg: str = ...
+
+class FrozenInstanceError(FrozenError): ...
+class FrozenAttributeError(FrozenError): ...
+class AttrsAttributeNotFoundError(ValueError): ...
+class NotAnAttrsClassError(ValueError): ...
+class DefaultAlreadySetError(RuntimeError): ...
+class UnannotatedAttributeError(RuntimeError): ...
+class PythonTooOldError(RuntimeError): ...
+
+class NotCallableError(TypeError):
+ msg: str = ...
+ value: Any = ...
+ def __init__(self, msg: str, value: Any) -> None: ...
diff --git a/libs/attr/filters.py b/libs/attr/filters.py
new file mode 100644
index 000000000..baa25e946
--- /dev/null
+++ b/libs/attr/filters.py
@@ -0,0 +1,51 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful filters for `attr.asdict`.
+"""
+
+from ._make import Attribute
+
+
+def _split_what(what):
+ """
+ Returns a tuple of `frozenset`s of classes and attributes.
+ """
+ return (
+ frozenset(cls for cls in what if isinstance(cls, type)),
+ frozenset(cls for cls in what if isinstance(cls, Attribute)),
+ )
+
+
+def include(*what):
+ """
+ Include *what*.
+
+ :param what: What to include.
+ :type what: `list` of `type` or `attrs.Attribute`\\ s
+
+ :rtype: `callable`
+ """
+ cls, attrs = _split_what(what)
+
+ def include_(attribute, value):
+ return value.__class__ in cls or attribute in attrs
+
+ return include_
+
+
+def exclude(*what):
+ """
+ Exclude *what*.
+
+ :param what: What to exclude.
+ :type what: `list` of classes or `attrs.Attribute`\\ s.
+
+ :rtype: `callable`
+ """
+ cls, attrs = _split_what(what)
+
+ def exclude_(attribute, value):
+ return value.__class__ not in cls and attribute not in attrs
+
+ return exclude_
diff --git a/libs/attr/filters.pyi b/libs/attr/filters.pyi
new file mode 100644
index 000000000..993866865
--- /dev/null
+++ b/libs/attr/filters.pyi
@@ -0,0 +1,6 @@
+from typing import Any, Union
+
+from . import Attribute, _FilterType
+
+def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
+def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
diff --git a/libs/attr/py.typed b/libs/attr/py.typed
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/libs/attr/py.typed
diff --git a/libs/attr/setters.py b/libs/attr/setters.py
new file mode 100644
index 000000000..12ed6750d
--- /dev/null
+++ b/libs/attr/setters.py
@@ -0,0 +1,73 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly used hooks for on_setattr.
+"""
+
+
+from . import _config
+from .exceptions import FrozenAttributeError
+
+
+def pipe(*setters):
+ """
+ Run all *setters* and return the return value of the last one.
+
+ .. versionadded:: 20.1.0
+ """
+
+ def wrapped_pipe(instance, attrib, new_value):
+ rv = new_value
+
+ for setter in setters:
+ rv = setter(instance, attrib, rv)
+
+ return rv
+
+ return wrapped_pipe
+
+
+def frozen(_, __, ___):
+ """
+    Prevent an attribute from being modified.
+
+ .. versionadded:: 20.1.0
+ """
+ raise FrozenAttributeError()
+
+
+def validate(instance, attrib, new_value):
+ """
+ Run *attrib*'s validator on *new_value* if it has one.
+
+ .. versionadded:: 20.1.0
+ """
+ if _config._run_validators is False:
+ return new_value
+
+ v = attrib.validator
+ if not v:
+ return new_value
+
+ v(instance, attrib, new_value)
+
+ return new_value
+
+
+def convert(instance, attrib, new_value):
+ """
+ Run *attrib*'s converter -- if it has one -- on *new_value* and return the
+ result.
+
+ .. versionadded:: 20.1.0
+ """
+ c = attrib.converter
+ if c:
+ return c(new_value)
+
+ return new_value
+
+
+# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
+# autodata stopped working, so the docstring is inlined in the API docs.
+NO_OP = object()
diff --git a/libs/attr/setters.pyi b/libs/attr/setters.pyi
new file mode 100644
index 000000000..3f5603c2b
--- /dev/null
+++ b/libs/attr/setters.pyi
@@ -0,0 +1,19 @@
+from typing import Any, NewType, NoReturn, TypeVar, cast
+
+from . import Attribute, _OnSetAttrType
+
+_T = TypeVar("_T")
+
+def frozen(
+ instance: Any, attribute: Attribute[Any], new_value: Any
+) -> NoReturn: ...
+def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
+def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
+
+# convert is allowed to return Any, because they can be chained using pipe.
+def convert(
+ instance: Any, attribute: Attribute[Any], new_value: Any
+) -> Any: ...
+
+_NoOpType = NewType("_NoOpType", object)
+NO_OP: _NoOpType
diff --git a/libs/attr/validators.py b/libs/attr/validators.py
new file mode 100644
index 000000000..eece517da
--- /dev/null
+++ b/libs/attr/validators.py
@@ -0,0 +1,594 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful validators.
+"""
+
+
+import operator
+import re
+
+from contextlib import contextmanager
+
+from ._config import get_run_validators, set_run_validators
+from ._make import _AndValidator, and_, attrib, attrs
+from .exceptions import NotCallableError
+
+
+try:
+ Pattern = re.Pattern
+except AttributeError: # Python <3.7 lacks a Pattern type.
+ Pattern = type(re.compile(""))
+
+
+__all__ = [
+ "and_",
+ "deep_iterable",
+ "deep_mapping",
+ "disabled",
+ "ge",
+ "get_disabled",
+ "gt",
+ "in_",
+ "instance_of",
+ "is_callable",
+ "le",
+ "lt",
+ "matches_re",
+ "max_len",
+ "min_len",
+ "optional",
+ "provides",
+ "set_disabled",
+]
+
+
+def set_disabled(disabled):
+ """
+ Globally disable or enable running validators.
+
+ By default, they are run.
+
+ :param disabled: If ``True``, disable running all validators.
+ :type disabled: bool
+
+ .. warning::
+
+ This function is not thread-safe!
+
+ .. versionadded:: 21.3.0
+ """
+ set_run_validators(not disabled)
+
+
+def get_disabled():
+ """
+ Return a bool indicating whether validators are currently disabled or not.
+
+ :return: ``True`` if validators are currently disabled.
+ :rtype: bool
+
+ .. versionadded:: 21.3.0
+ """
+ return not get_run_validators()
+
+
+@contextmanager
+def disabled():
+ """
+ Context manager that disables running validators within its context.
+
+ .. warning::
+
+ This context manager is not thread-safe!
+
+ .. versionadded:: 21.3.0
+ """
+ set_run_validators(False)
+ try:
+ yield
+ finally:
+ set_run_validators(True)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InstanceOfValidator:
+ type = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not isinstance(value, self.type):
+ raise TypeError(
+ "'{name}' must be {type!r} (got {value!r} that is a "
+ "{actual!r}).".format(
+ name=attr.name,
+ type=self.type,
+ actual=value.__class__,
+ value=value,
+ ),
+ attr,
+ self.type,
+ value,
+ )
+
+ def __repr__(self):
+ return "<instance_of validator for type {type!r}>".format(
+ type=self.type
+ )
+
+
+def instance_of(type):
+ """
+ A validator that raises a `TypeError` if the initializer is called
+ with a wrong type for this particular attribute (checks are performed using
+ `isinstance` therefore it's also valid to pass a tuple of types).
+
+ :param type: The type to check for.
+ :type type: type or tuple of types
+
+ :raises TypeError: With a human readable error message, the attribute
+ (of type `attrs.Attribute`), the expected type, and the value it
+ got.
+ """
+ return _InstanceOfValidator(type)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MatchesReValidator:
+ pattern = attrib()
+ match_func = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.match_func(value):
+ raise ValueError(
+ "'{name}' must match regex {pattern!r}"
+ " ({value!r} doesn't)".format(
+ name=attr.name, pattern=self.pattern.pattern, value=value
+ ),
+ attr,
+ self.pattern,
+ value,
+ )
+
+ def __repr__(self):
+ return "<matches_re validator for pattern {pattern!r}>".format(
+ pattern=self.pattern
+ )
+
+
+def matches_re(regex, flags=0, func=None):
+ r"""
+ A validator that raises `ValueError` if the initializer is called
+ with a string that doesn't match *regex*.
+
+ :param regex: a regex string or precompiled pattern to match against
+ :param int flags: flags that will be passed to the underlying re function
+ (default 0)
+ :param callable func: which underlying `re` function to call. Valid options
+ are `re.fullmatch`, `re.search`, and `re.match`; the default ``None``
+ means `re.fullmatch`. For performance reasons, the pattern is always
+ precompiled using `re.compile`.
+
+ .. versionadded:: 19.2.0
+ .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
+ """
+ valid_funcs = (re.fullmatch, None, re.search, re.match)
+ if func not in valid_funcs:
+ raise ValueError(
+ "'func' must be one of {}.".format(
+ ", ".join(
+ sorted(
+ e and e.__name__ or "None" for e in set(valid_funcs)
+ )
+ )
+ )
+ )
+
+ if isinstance(regex, Pattern):
+ if flags:
+ raise TypeError(
+ "'flags' can only be used with a string pattern; "
+ "pass flags to re.compile() instead"
+ )
+ pattern = regex
+ else:
+ pattern = re.compile(regex, flags)
+
+ if func is re.match:
+ match_func = pattern.match
+ elif func is re.search:
+ match_func = pattern.search
+ else:
+ match_func = pattern.fullmatch
+
+ return _MatchesReValidator(pattern, match_func)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _ProvidesValidator:
+ interface = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.interface.providedBy(value):
+ raise TypeError(
+ "'{name}' must provide {interface!r} which {value!r} "
+ "doesn't.".format(
+ name=attr.name, interface=self.interface, value=value
+ ),
+ attr,
+ self.interface,
+ value,
+ )
+
+ def __repr__(self):
+ return "<provides validator for interface {interface!r}>".format(
+ interface=self.interface
+ )
+
+
+def provides(interface):
+ """
+ A validator that raises a `TypeError` if the initializer is called
+ with an object that does not provide the requested *interface* (checks are
+ performed using ``interface.providedBy(value)`` (see `zope.interface
+ <https://zopeinterface.readthedocs.io/en/latest/>`_).
+
+ :param interface: The interface to check for.
+ :type interface: ``zope.interface.Interface``
+
+ :raises TypeError: With a human readable error message, the attribute
+ (of type `attrs.Attribute`), the expected interface, and the
+ value it got.
+ """
+ return _ProvidesValidator(interface)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _OptionalValidator:
+ validator = attrib()
+
+ def __call__(self, inst, attr, value):
+ if value is None:
+ return
+
+ self.validator(inst, attr, value)
+
+ def __repr__(self):
+ return "<optional validator for {what} or None>".format(
+ what=repr(self.validator)
+ )
+
+
+def optional(validator):
+ """
+ A validator that makes an attribute optional. An optional attribute is one
+ which can be set to ``None`` in addition to satisfying the requirements of
+ the sub-validator.
+
+ :param validator: A validator (or a list of validators) that is used for
+ non-``None`` values.
+ :type validator: callable or `list` of callables.
+
+ .. versionadded:: 15.1.0
+ .. versionchanged:: 17.1.0 *validator* can be a list of validators.
+ """
+ if isinstance(validator, list):
+ return _OptionalValidator(_AndValidator(validator))
+ return _OptionalValidator(validator)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InValidator:
+ options = attrib()
+
+ def __call__(self, inst, attr, value):
+ try:
+ in_options = value in self.options
+ except TypeError: # e.g. `1 in "abc"`
+ in_options = False
+
+ if not in_options:
+ raise ValueError(
+ "'{name}' must be in {options!r} (got {value!r})".format(
+ name=attr.name, options=self.options, value=value
+ ),
+ attr,
+ self.options,
+ value,
+ )
+
+ def __repr__(self):
+ return "<in_ validator with options {options!r}>".format(
+ options=self.options
+ )
+
+
+def in_(options):
+ """
+ A validator that raises a `ValueError` if the initializer is called
+ with a value that does not belong in the options provided. The check is
+ performed using ``value in options``.
+
+ :param options: Allowed options.
+ :type options: list, tuple, `enum.Enum`, ...
+
+ :raises ValueError: With a human readable error message, the attribute (of
+ type `attrs.Attribute`), the expected options, and the value it
+ got.
+
+ .. versionadded:: 17.1.0
+ .. versionchanged:: 22.1.0
+ The ValueError was incomplete until now and only contained the human
+ readable error message. Now it contains all the information that has
+ been promised since 17.1.0.
+ """
+ return _InValidator(options)
+
+
+@attrs(repr=False, slots=False, hash=True)
+class _IsCallableValidator:
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not callable(value):
+ message = (
+ "'{name}' must be callable "
+ "(got {value!r} that is a {actual!r})."
+ )
+ raise NotCallableError(
+ msg=message.format(
+ name=attr.name, value=value, actual=value.__class__
+ ),
+ value=value,
+ )
+
+ def __repr__(self):
+ return "<is_callable validator>"
+
+
+def is_callable():
+ """
+ A validator that raises a `attr.exceptions.NotCallableError` if the
+ initializer is called with a value for this particular attribute
+ that is not callable.
+
+ .. versionadded:: 19.1.0
+
+ :raises `attr.exceptions.NotCallableError`: With a human readable error
+ message containing the attribute (`attrs.Attribute`) name,
+ and the value it got.
+ """
+ return _IsCallableValidator()
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepIterable:
+ member_validator = attrib(validator=is_callable())
+ iterable_validator = attrib(
+ default=None, validator=optional(is_callable())
+ )
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.iterable_validator is not None:
+ self.iterable_validator(inst, attr, value)
+
+ for member in value:
+ self.member_validator(inst, attr, member)
+
+ def __repr__(self):
+ iterable_identifier = (
+ ""
+ if self.iterable_validator is None
+ else " {iterable!r}".format(iterable=self.iterable_validator)
+ )
+ return (
+ "<deep_iterable validator for{iterable_identifier}"
+ " iterables of {member!r}>"
+ ).format(
+ iterable_identifier=iterable_identifier,
+ member=self.member_validator,
+ )
+
+
+def deep_iterable(member_validator, iterable_validator=None):
+ """
+ A validator that performs deep validation of an iterable.
+
+ :param member_validator: Validator(s) to apply to iterable members
+ :param iterable_validator: Validator to apply to iterable itself
+ (optional)
+
+ .. versionadded:: 19.1.0
+
+ :raises TypeError: if any sub-validators fail
+ """
+ if isinstance(member_validator, (list, tuple)):
+ member_validator = and_(*member_validator)
+ return _DeepIterable(member_validator, iterable_validator)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepMapping:
+ key_validator = attrib(validator=is_callable())
+ value_validator = attrib(validator=is_callable())
+ mapping_validator = attrib(default=None, validator=optional(is_callable()))
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.mapping_validator is not None:
+ self.mapping_validator(inst, attr, value)
+
+ for key in value:
+ self.key_validator(inst, attr, key)
+ self.value_validator(inst, attr, value[key])
+
+ def __repr__(self):
+ return (
+ "<deep_mapping validator for objects mapping {key!r} to {value!r}>"
+ ).format(key=self.key_validator, value=self.value_validator)
+
+
+def deep_mapping(key_validator, value_validator, mapping_validator=None):
+ """
+ A validator that performs deep validation of a dictionary.
+
+ :param key_validator: Validator to apply to dictionary keys
+ :param value_validator: Validator to apply to dictionary values
+ :param mapping_validator: Validator to apply to top-level mapping
+ attribute (optional)
+
+ .. versionadded:: 19.1.0
+
+ :raises TypeError: if any sub-validators fail
+ """
+ return _DeepMapping(key_validator, value_validator, mapping_validator)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _NumberValidator:
+ bound = attrib()
+ compare_op = attrib()
+ compare_func = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.compare_func(value, self.bound):
+ raise ValueError(
+ "'{name}' must be {op} {bound}: {value}".format(
+ name=attr.name,
+ op=self.compare_op,
+ bound=self.bound,
+ value=value,
+ )
+ )
+
+ def __repr__(self):
+ return "<Validator for x {op} {bound}>".format(
+ op=self.compare_op, bound=self.bound
+ )
+
+
+def lt(val):
+ """
+ A validator that raises `ValueError` if the initializer is called
+    with a number larger than or equal to *val*.
+
+ :param val: Exclusive upper bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, "<", operator.lt)
+
+
+def le(val):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a number greater than *val*.
+
+ :param val: Inclusive upper bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, "<=", operator.le)
+
+
+def ge(val):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a number smaller than *val*.
+
+ :param val: Inclusive lower bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, ">=", operator.ge)
+
+
+def gt(val):
+ """
+ A validator that raises `ValueError` if the initializer is called
+    with a number smaller than or equal to *val*.
+
+ :param val: Exclusive lower bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, ">", operator.gt)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MaxLengthValidator:
+ max_length = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if len(value) > self.max_length:
+ raise ValueError(
+ "Length of '{name}' must be <= {max}: {len}".format(
+ name=attr.name, max=self.max_length, len=len(value)
+ )
+ )
+
+ def __repr__(self):
+ return "<max_len validator for {max}>".format(max=self.max_length)
+
+
+def max_len(length):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a string or iterable that is longer than *length*.
+
+ :param int length: Maximum length of the string or iterable
+
+ .. versionadded:: 21.3.0
+ """
+ return _MaxLengthValidator(length)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MinLengthValidator:
+ min_length = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if len(value) < self.min_length:
+ raise ValueError(
+ "Length of '{name}' must be => {min}: {len}".format(
+ name=attr.name, min=self.min_length, len=len(value)
+ )
+ )
+
+ def __repr__(self):
+ return "<min_len validator for {min}>".format(min=self.min_length)
+
+
+def min_len(length):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a string or iterable that is shorter than *length*.
+
+ :param int length: Minimum length of the string or iterable
+
+ .. versionadded:: 22.1.0
+ """
+ return _MinLengthValidator(length)
diff --git a/libs/attr/validators.pyi b/libs/attr/validators.pyi
new file mode 100644
index 000000000..54b9dba24
--- /dev/null
+++ b/libs/attr/validators.pyi
@@ -0,0 +1,80 @@
+from typing import (
+ Any,
+ AnyStr,
+ Callable,
+ Container,
+ ContextManager,
+ Iterable,
+ List,
+ Mapping,
+ Match,
+ Optional,
+ Pattern,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ overload,
+)
+
+from . import _ValidatorType
+from . import _ValidatorArgType
+
# Type variables used by the validator-factory signatures below.
_T = TypeVar("_T")
_T1 = TypeVar("_T1")
_T2 = TypeVar("_T2")
_T3 = TypeVar("_T3")
_I = TypeVar("_I", bound=Iterable)  # an iterable validated as a whole
_K = TypeVar("_K")  # mapping key type
_V = TypeVar("_V")  # mapping value type
_M = TypeVar("_M", bound=Mapping)  # a mapping validated as a whole

# Global on/off switch for all validators, plus its context-manager form.
def set_disabled(run: bool) -> None: ...
def get_disabled() -> bool: ...
def disabled() -> ContextManager[None]: ...

# To be more precise on instance_of use some overloads.
# If there are more than 3 items in the tuple then we fall back to Any
@overload
def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ...
@overload
def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ...
@overload
def instance_of(
    type: Tuple[Type[_T1], Type[_T2]]
) -> _ValidatorType[Union[_T1, _T2]]: ...
@overload
def instance_of(
    type: Tuple[Type[_T1], Type[_T2], Type[_T3]]
) -> _ValidatorType[Union[_T1, _T2, _T3]]: ...
@overload
def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ...
def provides(interface: Any) -> _ValidatorType[Any]: ...
def optional(
    validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]]
) -> _ValidatorType[Optional[_T]]: ...
def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
def matches_re(
    regex: Union[Pattern[AnyStr], AnyStr],
    flags: int = ...,
    func: Optional[
        Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]]
    ] = ...,
) -> _ValidatorType[AnyStr]: ...
# Element-wise validation of iterables and mappings.
def deep_iterable(
    member_validator: _ValidatorArgType[_T],
    iterable_validator: Optional[_ValidatorType[_I]] = ...,
) -> _ValidatorType[_I]: ...
def deep_mapping(
    key_validator: _ValidatorType[_K],
    value_validator: _ValidatorType[_V],
    mapping_validator: Optional[_ValidatorType[_M]] = ...,
) -> _ValidatorType[_M]: ...
def is_callable() -> _ValidatorType[_T]: ...
# Bound validators (lt/le/ge/gt) and length validators (max_len/min_len).
def lt(val: _T) -> _ValidatorType[_T]: ...
def le(val: _T) -> _ValidatorType[_T]: ...
def ge(val: _T) -> _ValidatorType[_T]: ...
def gt(val: _T) -> _ValidatorType[_T]: ...
def max_len(length: int) -> _ValidatorType[_T]: ...
def min_len(length: int) -> _ValidatorType[_T]: ...
diff --git a/libs/attrs/__init__.py b/libs/attrs/__init__.py
new file mode 100644
index 000000000..a704b8b56
--- /dev/null
+++ b/libs/attrs/__init__.py
@@ -0,0 +1,70 @@
+# SPDX-License-Identifier: MIT
+
+from attr import (
+ NOTHING,
+ Attribute,
+ Factory,
+ __author__,
+ __copyright__,
+ __description__,
+ __doc__,
+ __email__,
+ __license__,
+ __title__,
+ __url__,
+ __version__,
+ __version_info__,
+ assoc,
+ cmp_using,
+ define,
+ evolve,
+ field,
+ fields,
+ fields_dict,
+ frozen,
+ has,
+ make_class,
+ mutable,
+ resolve_types,
+ validate,
+)
+from attr._next_gen import asdict, astuple
+
+from . import converters, exceptions, filters, setters, validators
+
+
# Public API of the ``attrs`` namespace package: every name re-exported
# from ``attr`` above, plus the submodule aliases imported via
# ``from . import ...``.
__all__ = [
    "__author__",
    "__copyright__",
    "__description__",
    "__doc__",
    "__email__",
    "__license__",
    "__title__",
    "__url__",
    "__version__",
    "__version_info__",
    "asdict",
    "assoc",
    "astuple",
    "Attribute",
    "cmp_using",
    "converters",
    "define",
    "evolve",
    "exceptions",
    "Factory",
    "field",
    "fields_dict",
    "fields",
    "filters",
    "frozen",
    "has",
    "make_class",
    "mutable",
    "NOTHING",
    "resolve_types",
    "setters",
    "validate",
    "validators",
]
diff --git a/libs/attrs/__init__.pyi b/libs/attrs/__init__.pyi
new file mode 100644
index 000000000..fc44de46a
--- /dev/null
+++ b/libs/attrs/__init__.pyi
@@ -0,0 +1,66 @@
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Mapping,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+)
+
+# Because we need to type our own stuff, we have to make everything from
+# attr explicitly public too.
+from attr import __author__ as __author__
+from attr import __copyright__ as __copyright__
+from attr import __description__ as __description__
+from attr import __email__ as __email__
+from attr import __license__ as __license__
+from attr import __title__ as __title__
+from attr import __url__ as __url__
+from attr import __version__ as __version__
+from attr import __version_info__ as __version_info__
+from attr import _FilterType
+from attr import assoc as assoc
+from attr import Attribute as Attribute
+from attr import cmp_using as cmp_using
+from attr import converters as converters
+from attr import define as define
+from attr import evolve as evolve
+from attr import exceptions as exceptions
+from attr import Factory as Factory
+from attr import field as field
+from attr import fields as fields
+from attr import fields_dict as fields_dict
+from attr import filters as filters
+from attr import frozen as frozen
+from attr import has as has
+from attr import make_class as make_class
+from attr import mutable as mutable
+from attr import NOTHING as NOTHING
+from attr import resolve_types as resolve_types
+from attr import setters as setters
+from attr import validate as validate
+from attr import validators as validators
+
# TODO: see definition of attr.asdict/astuple
# Stub for the keyword-defaults variant of ``asdict`` exposed by the
# ``attrs`` namespace (implemented in ``attr._next_gen``).
def asdict(
    inst: Any,
    recurse: bool = ...,
    filter: Optional[_FilterType[Any]] = ...,
    dict_factory: Type[Mapping[Any, Any]] = ...,
    retain_collection_types: bool = ...,
    value_serializer: Optional[
        Callable[[type, Attribute[Any], Any], Any]
    ] = ...,
    tuple_keys: bool = ...,
) -> Dict[str, Any]: ...

# TODO: add support for returning NamedTuple from the mypy plugin
def astuple(
    inst: Any,
    recurse: bool = ...,
    filter: Optional[_FilterType[Any]] = ...,
    tuple_factory: Type[Sequence[Any]] = ...,
    retain_collection_types: bool = ...,
) -> Tuple[Any, ...]: ...
diff --git a/libs/attrs/converters.py b/libs/attrs/converters.py
new file mode 100644
index 000000000..edfa8d3c1
--- /dev/null
+++ b/libs/attrs/converters.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.converters import * # noqa
diff --git a/libs/attrs/exceptions.py b/libs/attrs/exceptions.py
new file mode 100644
index 000000000..bd9efed20
--- /dev/null
+++ b/libs/attrs/exceptions.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.exceptions import * # noqa
diff --git a/libs/attrs/filters.py b/libs/attrs/filters.py
new file mode 100644
index 000000000..52959005b
--- /dev/null
+++ b/libs/attrs/filters.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.filters import * # noqa
diff --git a/libs/attrs/py.typed b/libs/attrs/py.typed
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/libs/attrs/py.typed
diff --git a/libs/attrs/setters.py b/libs/attrs/setters.py
new file mode 100644
index 000000000..9b5077080
--- /dev/null
+++ b/libs/attrs/setters.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.setters import * # noqa
diff --git a/libs/attrs/validators.py b/libs/attrs/validators.py
new file mode 100644
index 000000000..ab2c9b302
--- /dev/null
+++ b/libs/attrs/validators.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.validators import * # noqa
diff --git a/libs/version.txt b/libs/version.txt
index 16606c6ec..bd3d904d3 100644
--- a/libs/version.txt
+++ b/libs/version.txt
@@ -1,7 +1,9 @@
# Bazarr dependencies
+aniso8601==9.0.1
argparse==1.4.0
apprise==0.9.8.3
apscheduler==3.8.1
+attrs==22.1.0
charamel==1.0.0
deep-translator==1.8.3
dogpile.cache==1.1.5