virtuelle Umgebung teil20b

This commit is contained in:
Olli Graf
2023-09-01 16:01:37 +02:00
parent c9aee44812
commit c04c88e2af
7025 changed files with 1325157 additions and 0 deletions

View File

@@ -0,0 +1,85 @@
# Public names re-exported by the pandas._libs.tslibs subpackage.
# Keep this list in sync with the imports below.
__all__ = [
    "dtypes",
    "localize_pydatetime",
    "NaT",
    "NaTType",
    "iNaT",
    "nat_strings",
    "OutOfBoundsDatetime",
    "OutOfBoundsTimedelta",
    "IncompatibleFrequency",
    "Period",
    "Resolution",
    "Timedelta",
    "normalize_i8_timestamps",
    "is_date_array_normalized",
    "dt64arr_to_periodarr",
    "delta_to_nanoseconds",
    "ints_to_pydatetime",
    "ints_to_pytimedelta",
    "get_resolution",
    "Timestamp",
    "tz_convert_from_utc_single",
    "tz_convert_from_utc",
    "to_offset",
    "Tick",
    "BaseOffset",
    "tz_compare",
    "is_unitless",
    "astype_overflowsafe",
    "get_unit_from_dtype",
    "periods_per_day",
    "periods_per_second",
    "is_supported_unit",
    "npy_unit_to_abbrev",
    "get_supported_reso",
]
from pandas._libs.tslibs import dtypes # pylint: disable=import-self
from pandas._libs.tslibs.conversion import localize_pydatetime
from pandas._libs.tslibs.dtypes import (
Resolution,
get_supported_reso,
is_supported_unit,
npy_unit_to_abbrev,
periods_per_day,
periods_per_second,
)
from pandas._libs.tslibs.nattype import (
NaT,
NaTType,
iNaT,
nat_strings,
)
from pandas._libs.tslibs.np_datetime import (
OutOfBoundsDatetime,
OutOfBoundsTimedelta,
astype_overflowsafe,
is_unitless,
py_get_unit_from_dtype as get_unit_from_dtype,
)
from pandas._libs.tslibs.offsets import (
BaseOffset,
Tick,
to_offset,
)
from pandas._libs.tslibs.period import (
IncompatibleFrequency,
Period,
)
from pandas._libs.tslibs.timedeltas import (
Timedelta,
delta_to_nanoseconds,
ints_to_pytimedelta,
)
from pandas._libs.tslibs.timestamps import Timestamp
from pandas._libs.tslibs.timezones import tz_compare
from pandas._libs.tslibs.tzconversion import tz_convert_from_utc_single
from pandas._libs.tslibs.vectorized import (
dt64arr_to_periodarr,
get_resolution,
ints_to_pydatetime,
is_date_array_normalized,
normalize_i8_timestamps,
tz_convert_from_utc,
)

View File

@@ -0,0 +1,5 @@
from cpython.datetime cimport datetime


# Declaration shared via this .pxd so other Cython modules can cimport
# ABCTimestamp and do fast isinstance checks without circular imports.
cdef class ABCTimestamp(datetime):
    pass

View File

@@ -0,0 +1,12 @@
"""
We define base classes that will be inherited by Timestamp, Timedelta, etc
in order to allow for fast isinstance checks without circular dependency issues.
This is analogous to core.dtypes.generic.
"""
from cpython.datetime cimport datetime
cdef class ABCTimestamp(datetime):
pass

View File

@@ -0,0 +1,20 @@
from cython cimport Py_ssize_t
from numpy cimport (
    int32_t,
    int64_t,
)

# (ISO year, ISO week, ISO weekday) triple returned by get_iso_calendar.
ctypedef (int32_t, int32_t, int32_t) iso_calendar_t

# Calendar helpers implemented in ccalendar.pyx; all are nogil-safe.
cdef int dayofweek(int y, int m, int d) noexcept nogil
cdef bint is_leapyear(int64_t year) noexcept nogil
cpdef int32_t get_days_in_month(int year, Py_ssize_t month) noexcept nogil
cpdef int32_t get_week_of_year(int year, int month, int day) noexcept nogil
cpdef iso_calendar_t get_iso_calendar(int year, int month, int day) noexcept nogil
cpdef int32_t get_day_of_year(int year, int month, int day) noexcept nogil
cpdef int get_lastbday(int year, int month) noexcept nogil
cpdef int get_firstbday(int year, int month) noexcept nogil

# Lookup tables defined in ccalendar.pyx and shared with other modules.
cdef dict c_MONTH_NUMBERS

cdef int32_t* month_offset

View File

@@ -0,0 +1,12 @@
# Name-constant tables exposed from ccalendar.pyx.
DAYS: list[str]
MONTH_ALIASES: dict[int, str]
MONTH_NUMBERS: dict[str, int]
MONTHS: list[str]
int_to_weekday: dict[int, str]

def get_firstbday(year: int, month: int) -> int: ...
def get_lastbday(year: int, month: int) -> int: ...
def get_day_of_year(year: int, month: int, day: int) -> int: ...
def get_iso_calendar(year: int, month: int, day: int) -> tuple[int, int, int]: ...
def get_week_of_year(year: int, month: int, day: int) -> int: ...
def get_days_in_month(year: int, month: int) -> int: ...

View File

@@ -0,0 +1,310 @@
# cython: boundscheck=False
"""
Cython implementations of functions resembling the stdlib calendar module
"""
cimport cython
from numpy cimport (
int32_t,
int64_t,
)
# ----------------------------------------------------------------------
# Constants
# Slightly more performant cython lookups than a 2D table
# The first 12 entries correspond to month lengths for non-leap years.
# The remaining 12 entries give month lengths for leap years
cdef int32_t* days_per_month_array = [
    31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31,
    31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]

# Days elapsed before the start of month m (1-indexed, non-leap year);
# index 0 is unused.  Consumed by dayofweek().
cdef int* em = [0, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]

# The first 13 entries give the month days elapsed as of the first of month N
# (or the total number of days in the year for N=13) in non-leap years.
# The remaining 13 entries give the days elapsed in leap years.
cdef int32_t* month_offset = [
    0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365,
    0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366]

# Canonical location for other modules to find name constants
MONTHS = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL",
          "AUG", "SEP", "OCT", "NOV", "DEC"]
# The first blank line is consistent with calendar.month_name in the calendar
# standard library
MONTHS_FULL = ["", "January", "February", "March", "April", "May", "June",
               "July", "August", "September", "October", "November",
               "December"]
# "JAN" -> 0, ..., "DEC" -> 11
MONTH_NUMBERS = {name: num for num, name in enumerate(MONTHS)}
cdef dict c_MONTH_NUMBERS = MONTH_NUMBERS
# 1 -> "JAN", ..., 12 -> "DEC"
MONTH_ALIASES = {(num + 1): name for num, name in enumerate(MONTHS)}
MONTH_TO_CAL_NUM = {name: num + 1 for num, name in enumerate(MONTHS)}

DAYS = ["MON", "TUE", "WED", "THU", "FRI", "SAT", "SUN"]
DAYS_FULL = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday",
             "Saturday", "Sunday"]
# 0 -> "MON", ..., 6 -> "SUN", plus the inverse mapping.
int_to_weekday = {num: name for num, name in enumerate(DAYS)}
weekday_to_int = {int_to_weekday[key]: key for key in int_to_weekday}
# ----------------------------------------------------------------------
@cython.wraparound(False)
@cython.boundscheck(False)
cpdef int32_t get_days_in_month(int year, Py_ssize_t month) noexcept nogil:
    """
    Return the number of days in the given month of the given year.

    Parameters
    ----------
    year : int
    month : int

    Returns
    -------
    days_in_month : int

    Notes
    -----
    Assumes that the arguments are valid.  Passing a month not between 1 and 12
    risks a segfault.
    """
    # Leap years index into the second half of the 24-entry table.
    return days_per_month_array[12 * is_leapyear(year) + month - 1]
@cython.wraparound(False)
@cython.boundscheck(False)
@cython.cdivision(True)
cdef long quot(long a , long b) noexcept nogil:
    """Floor-divide `a` by `b` using C division plus a negative adjustment."""
    cdef long q = a / b
    # C division truncates toward zero; for negative numerators with a
    # nonzero remainder, step down once to get floor-division behavior.
    if a < 0 and a % b != 0:
        q -= 1
    return q
@cython.wraparound(False)
@cython.boundscheck(False)
@cython.cdivision(True)
cdef int dayofweek(int y, int m, int d) noexcept nogil:
    """
    Find the day of week for the date described by the Y/M/D triple y, m, d
    using Gauss' method, from wikipedia.

    0 represents Monday.  See [1]_.

    Parameters
    ----------
    y : int
    m : int
    d : int

    Returns
    -------
    weekday : int

    Notes
    -----
    Assumes that y, m, d, represents a valid date.

    See Also
    --------
    [1] https://docs.python.org/3/library/calendar.html#calendar.weekday

    [2] https://en.wikipedia.org/wiki/\
    Determination_of_the_day_of_the_week#Gauss's_algorithm
    """
    # Note: this particular implementation comes from
    # http://berndt-schwerdtfeger.de/wp-content/uploads/pdf/cal.pdf
    cdef:
        long c
        int g
        int f
        int e

    if (m < 3):
        # January/February are treated as belonging to the previous year.
        y -= 1

    c = quot(y, 100)   # century
    g = y - c * 100    # year within the century
    f = 5 * (c - quot(c, 4) * 4)
    e = em[m]          # days elapsed before month m (non-leap table)

    if (m > 2):
        e -= 1
    return (-1 + d + e + f + g + g/4) % 7
cdef bint is_leapyear(int64_t year) noexcept nogil:
    """
    Return 1 if the given year is a leap year, 0 otherwise.

    Parameters
    ----------
    year : int

    Returns
    -------
    is_leap : bool
    """
    if year % 4 != 0:
        return 0
    # Century years are leap years only when divisible by 400.
    return (year % 100) != 0 or (year % 400) == 0
@cython.wraparound(False)
@cython.boundscheck(False)
cpdef int32_t get_week_of_year(int year, int month, int day) noexcept nogil:
    """
    Return the ordinal week-of-year for the given day.

    Parameters
    ----------
    year : int
    month : int
    day : int

    Returns
    -------
    week_of_year : int32_t

    Notes
    -----
    Assumes the inputs describe a valid date.
    """
    # The ISO calendar triple is (year, week, weekday); take the week.
    return get_iso_calendar(year, month, day)[1]
@cython.wraparound(False)
@cython.boundscheck(False)
cpdef iso_calendar_t get_iso_calendar(int year, int month, int day) noexcept nogil:
    """
    Return the year, week, and day of year corresponding to ISO 8601

    Parameters
    ----------
    year : int
    month : int
    day : int

    Returns
    -------
    year : int32_t
    week : int32_t
    day : int32_t

    Notes
    -----
    Assumes the inputs describe a valid date.
    """
    cdef:
        int32_t doy, dow
        int32_t iso_year, iso_week

    doy = get_day_of_year(year, month, day)
    dow = dayofweek(year, month, day)

    # estimate
    iso_week = (doy - 1) - dow + 3
    if iso_week >= 0:
        iso_week = iso_week // 7 + 1

    # verify
    if iso_week < 0:
        # Early January days can fall in week 52/53 of the previous ISO year.
        if (iso_week > -2) or (iso_week == -2 and is_leapyear(year - 1)):
            iso_week = 53
        else:
            iso_week = 52
    elif iso_week == 53:
        # Late December days can fall in week 1 of the next ISO year.
        if 31 - day + dow < 3:
            iso_week = 1

    iso_year = year
    if iso_week == 1 and month == 12:
        iso_year += 1

    elif iso_week >= 52 and month == 1:
        iso_year -= 1

    # ISO weekday is 1-based (Monday == 1).
    return iso_year, iso_week, dow + 1
@cython.wraparound(False)
@cython.boundscheck(False)
cpdef int32_t get_day_of_year(int year, int month, int day) noexcept nogil:
    """
    Return the ordinal day-of-year for the given day.

    Parameters
    ----------
    year : int
    month : int
    day : int

    Returns
    -------
    day_of_year : int32_t

    Notes
    -----
    Assumes the inputs describe a valid date.
    """
    cdef:
        int32_t days_before_month

    # Leap years index into the second half of the 26-entry month_offset
    # table (13 entries per half).
    days_before_month = month_offset[13 * is_leapyear(year) + month - 1]
    return days_before_month + day
# ---------------------------------------------------------------------
# Business Helpers
cpdef int get_lastbday(int year, int month) noexcept nogil:
    """
    Find the last day of the month that is a business day.

    Parameters
    ----------
    year : int
    month : int

    Returns
    -------
    last_bday : int
    """
    cdef:
        int first_wkday, n_days, last_wkday

    first_wkday = dayofweek(year, month, 1)
    n_days = get_days_in_month(year, month)
    # Weekday (0=Monday) of the month's final calendar day.
    last_wkday = (first_wkday + n_days - 1) % 7
    if last_wkday > 4:
        # Final day is Saturday (5) or Sunday (6): step back to Friday.
        return n_days - (last_wkday - 4)
    return n_days
cpdef int get_firstbday(int year, int month) noexcept nogil:
    """
    Find the first day of the month that is a business day.

    Parameters
    ----------
    year : int
    month : int

    Returns
    -------
    first_bday : int
    """
    cdef:
        int wkday

    wkday = dayofweek(year, month, 1)
    if wkday == 5:
        # The 1st is a Saturday: first business day is Monday the 3rd.
        return 3
    if wkday == 6:
        # The 1st is a Sunday: first business day is Monday the 2nd.
        return 2
    return 1

View File

@@ -0,0 +1,62 @@
from cpython.datetime cimport (
datetime,
tzinfo,
)
from numpy cimport (
int32_t,
int64_t,
ndarray,
)
from pandas._libs.tslibs.np_datetime cimport (
NPY_DATETIMEUNIT,
npy_datetimestruct,
)
from pandas._libs.tslibs.timestamps cimport _Timestamp
from pandas._libs.tslibs.timezones cimport tz_compare
cdef class _TSObject:
    # Lightweight pairing of an npy_datetimestruct with its integer value,
    # timezone, PEP 495 fold, and resolution.
    cdef readonly:
        npy_datetimestruct dts  # npy_datetimestruct
        int64_t value           # numpy dt64
        tzinfo tzinfo
        bint fold
        NPY_DATETIMEUNIT creso

    cdef int64_t ensure_reso(self, NPY_DATETIMEUNIT creso, str val=*) except? -1


# Conversion entry points implemented in conversion.pyx.
cdef _TSObject convert_to_tsobject(object ts, tzinfo tz, str unit,
                                   bint dayfirst, bint yearfirst,
                                   int32_t nanos=*)

cdef _TSObject convert_datetime_to_tsobject(datetime ts, tzinfo tz,
                                            int32_t nanos=*,
                                            NPY_DATETIMEUNIT reso=*)

cdef _TSObject convert_str_to_tsobject(str ts, tzinfo tz, str unit,
                                       bint dayfirst=*,
                                       bint yearfirst=*)

cdef int64_t get_datetime64_nanos(object val, NPY_DATETIMEUNIT reso) except? -1

cpdef datetime localize_pydatetime(datetime dt, tzinfo tz)
cdef int64_t cast_from_unit(object ts, str unit, NPY_DATETIMEUNIT out_reso=*) except? -1
cpdef (int64_t, int) precision_from_unit(str unit, NPY_DATETIMEUNIT out_reso=*)

cdef maybe_localize_tso(_TSObject obj, tzinfo tz, NPY_DATETIMEUNIT reso)

cdef tzinfo convert_timezone(
    tzinfo tz_in,
    tzinfo tz_out,
    bint found_naive,
    bint found_tz,
    bint utc_convert,
)

cdef int64_t parse_pydatetime(
    datetime val,
    npy_datetimestruct *dts,
    bint utc_convert,
) except? -1

View File

@@ -0,0 +1,14 @@
from datetime import (
datetime,
tzinfo,
)
import numpy as np
# dtype singletons for datetime64[ns] / timedelta64[ns].
DT64NS_DTYPE: np.dtype
TD64NS_DTYPE: np.dtype

def precision_from_unit(
    unit: str,
) -> tuple[int, int]: ...  # (int64_t, _)
def localize_pydatetime(dt: datetime, tz: tzinfo | None) -> datetime: ...

View File

@@ -0,0 +1,768 @@
import numpy as np
cimport numpy as cnp
from libc.math cimport log10
from numpy cimport (
int32_t,
int64_t,
)
cnp.import_array()
# stdlib datetime imports
from datetime import timezone
from cpython.datetime cimport (
PyDate_Check,
PyDateTime_Check,
datetime,
import_datetime,
time,
timedelta,
tzinfo,
)
import_datetime()
from pandas._libs.missing cimport checknull_with_nat_and_na
from pandas._libs.tslibs.base cimport ABCTimestamp
from pandas._libs.tslibs.dtypes cimport (
abbrev_to_npy_unit,
get_supported_reso,
periods_per_second,
)
from pandas._libs.tslibs.np_datetime cimport (
NPY_DATETIMEUNIT,
NPY_FR_ns,
NPY_FR_us,
check_dts_bounds,
convert_reso,
get_conversion_factor,
get_datetime64_unit,
get_datetime64_value,
get_implementation_bounds,
import_pandas_datetime,
npy_datetime,
npy_datetimestruct,
npy_datetimestruct_to_datetime,
pandas_datetime_to_datetimestruct,
pydatetime_to_dt64,
pydatetime_to_dtstruct,
string_to_dts,
)
import_pandas_datetime()
from pandas._libs.tslibs.np_datetime import OutOfBoundsDatetime
from pandas._libs.tslibs.nattype cimport (
NPY_NAT,
c_nat_strings as nat_strings,
)
from pandas._libs.tslibs.parsing cimport parse_datetime_string
from pandas._libs.tslibs.timezones cimport (
get_utcoffset,
is_utc,
)
from pandas._libs.tslibs.tzconversion cimport (
Localizer,
tz_localize_to_utc_single,
)
from pandas._libs.tslibs.util cimport (
is_datetime64_object,
is_float_object,
is_integer_object,
)
# ----------------------------------------------------------------------
# Constants
# numpy dtype singletons for datetime64[ns] and timedelta64[ns].
DT64NS_DTYPE = np.dtype("M8[ns]")
TD64NS_DTYPE = np.dtype("m8[ns]")
# ----------------------------------------------------------------------
# Unit Conversion Helpers
cdef int64_t cast_from_unit(
    object ts,
    str unit,
    NPY_DATETIMEUNIT out_reso=NPY_FR_ns
) except? -1:
    """
    Return a casting of the unit represented to nanoseconds
    round the fractional part of a float to our precision, p.

    Parameters
    ----------
    ts : int, float, or None
    unit : str
    out_reso : NPY_DATETIMEUNIT, default NPY_FR_ns
        Target resolution of the returned integer.

    Returns
    -------
    int64_t

    Raises
    ------
    ValueError
        If a non-round float is passed with unit "Y" or "M".
    OutOfBoundsDatetime
        If the value does not fit in int64 at the target resolution.
    """
    cdef:
        int64_t m
        int p

    if unit in ["Y", "M"]:
        if is_float_object(ts) and not ts.is_integer():
            # GH#47267 it is clear that 2 "M" corresponds to 1970-02-01,
            #  but not clear what 2.5 "M" corresponds to, so we will
            #  disallow that case.
            raise ValueError(
                f"Conversion of non-round float with unit={unit} "
                "is ambiguous"
            )
        # GH#47266 go through np.datetime64 to avoid weird results e.g. with "Y"
        #  and 150 we'd get 2120-01-01 09:00:00
        if is_float_object(ts):
            ts = int(ts)
        dt64obj = np.datetime64(ts, unit)
        return get_datetime64_nanos(dt64obj, out_reso)

    m, p = precision_from_unit(unit, out_reso)

    # cast the unit, multiply base/frac separately
    # to avoid precision issues from float -> int
    try:
        base = <int64_t>ts
    except OverflowError as err:
        raise OutOfBoundsDatetime(
            f"cannot convert input {ts} with the unit '{unit}'"
        ) from err

    frac = ts - base
    if p:
        # Round the fractional part to the precision the unit supports.
        frac = round(frac, p)

    try:
        return <int64_t>(base * m) + <int64_t>(frac * m)
    except OverflowError as err:
        raise OutOfBoundsDatetime(
            f"cannot convert input {ts} with the unit '{unit}'"
        ) from err
cpdef inline (int64_t, int) precision_from_unit(
    str unit,
    NPY_DATETIMEUNIT out_reso=NPY_DATETIMEUNIT.NPY_FR_ns,
):
    """
    Return a casting of the unit represented to nanoseconds + the precision
    to round the fractional part.

    Notes
    -----
    The caller is responsible for ensuring that the default value of "ns"
    takes the place of None.
    """
    cdef:
        int64_t m
        int64_t multiplier
        int p
        NPY_DATETIMEUNIT reso = abbrev_to_npy_unit(unit)

    if reso == NPY_DATETIMEUNIT.NPY_FR_GENERIC:
        # Unit-less input defaults to nanoseconds.
        reso = NPY_DATETIMEUNIT.NPY_FR_ns
    if reso == NPY_DATETIMEUNIT.NPY_FR_Y:
        # each 400 years we have 97 leap years, for an average of 97/400=.2425
        #  extra days each year. We get 31556952 by writing
        #  3600*24*365.2425=31556952
        multiplier = periods_per_second(out_reso)
        m = multiplier * 31556952
    elif reso == NPY_DATETIMEUNIT.NPY_FR_M:
        # 2629746 comes from dividing the "Y" case by 12.
        multiplier = periods_per_second(out_reso)
        m = multiplier * 2629746
    else:
        # Careful: if get_conversion_factor raises, the exception does
        #  not propagate, instead we get a warning about an ignored exception.
        #  https://github.com/pandas-dev/pandas/pull/51483#discussion_r1115198951
        m = get_conversion_factor(reso, out_reso)

    p = <int>log10(m)  # number of digits in 'm' minus 1
    return m, p
cdef int64_t get_datetime64_nanos(object val, NPY_DATETIMEUNIT reso) except? -1:
    """
    Extract the value and unit from a np.datetime64 object, then convert the
    value to nanoseconds if necessary.
    """
    cdef:
        npy_datetimestruct dts
        NPY_DATETIMEUNIT unit
        npy_datetime ival

    ival = get_datetime64_value(val)
    if ival == NPY_NAT:
        # NaT passes through unchanged regardless of resolution.
        return NPY_NAT

    unit = get_datetime64_unit(val)

    if unit != reso:
        # Round-trip through a datetimestruct to re-express the value in
        # the requested resolution, bounds-checking along the way.
        pandas_datetime_to_datetimestruct(ival, unit, &dts)
        check_dts_bounds(&dts, reso)
        ival = npy_datetimestruct_to_datetime(reso, &dts)

    return ival
# ----------------------------------------------------------------------
# _TSObject Conversion
# lightweight C object to hold datetime & int64 pair
cdef class _TSObject:
    # Attributes are declared in conversion.pxd:
    # cdef:
    #    npy_datetimestruct dts      # npy_datetimestruct
    #    int64_t value               # numpy dt64
    #    tzinfo tzinfo
    #    bint fold
    #    NPY_DATETIMEUNIT creso

    def __cinit__(self):
        # GH 25057. As per PEP 495, set fold to 0 by default
        self.fold = 0
        self.creso = NPY_FR_ns  # default value

    cdef int64_t ensure_reso(self, NPY_DATETIMEUNIT creso, str val=None) except? -1:
        # Convert self.value to the requested resolution in-place, raising
        # OutOfBoundsDatetime (mentioning `val` when provided) on overflow.
        if self.creso != creso:
            try:
                self.value = convert_reso(self.value, self.creso, creso, False)
            except OverflowError as err:
                if val is not None:
                    raise OutOfBoundsDatetime(
                        f"Out of bounds nanosecond timestamp: {val}"
                    ) from err
                raise OutOfBoundsDatetime from err

            self.creso = creso
        return self.value
cdef _TSObject convert_to_tsobject(object ts, tzinfo tz, str unit,
                                   bint dayfirst, bint yearfirst, int32_t nanos=0):
    """
    Extract datetime and int64 from any of:
        - np.int64 (with unit providing a possible modifier)
        - np.datetime64
        - a float (with unit providing a possible modifier)
        - python int or long object (with unit providing a possible modifier)
        - iso8601 string object
        - python datetime object
        - another timestamp object

    Raises
    ------
    OutOfBoundsDatetime : ts cannot be converted within implementation bounds
    """
    cdef:
        _TSObject obj
        NPY_DATETIMEUNIT reso

    obj = _TSObject()

    if isinstance(ts, str):
        # Strings have their own parsing/localization path.
        return convert_str_to_tsobject(ts, tz, unit, dayfirst, yearfirst)

    if checknull_with_nat_and_na(ts):
        obj.value = NPY_NAT
    elif is_datetime64_object(ts):
        reso = get_supported_reso(get_datetime64_unit(ts))
        obj.creso = reso
        obj.value = get_datetime64_nanos(ts, reso)
        if obj.value != NPY_NAT:
            pandas_datetime_to_datetimestruct(obj.value, reso, &obj.dts)
    elif is_integer_object(ts):
        try:
            ts = <int64_t>ts
        except OverflowError:
            # GH#26651 re-raise as OutOfBoundsDatetime
            raise OutOfBoundsDatetime(f"Out of bounds nanosecond timestamp {ts}")
        if ts == NPY_NAT:
            obj.value = NPY_NAT
        else:
            if unit is None:
                unit = "ns"
            in_reso = abbrev_to_npy_unit(unit)
            reso = get_supported_reso(in_reso)
            ts = cast_from_unit(ts, unit, reso)
            obj.value = ts
            obj.creso = reso
            pandas_datetime_to_datetimestruct(ts, reso, &obj.dts)
    elif is_float_object(ts):
        # ts != ts is the NaN check.
        if ts != ts or ts == NPY_NAT:
            obj.value = NPY_NAT
        else:
            ts = cast_from_unit(ts, unit)
            obj.value = ts
            pandas_datetime_to_datetimestruct(ts, NPY_FR_ns, &obj.dts)
    elif PyDateTime_Check(ts):
        if nanos == 0:
            if isinstance(ts, ABCTimestamp):
                reso = abbrev_to_npy_unit(ts.unit)  # TODO: faster way to do this?
            else:
                # TODO: what if user explicitly passes nanos=0?
                reso = NPY_FR_us
        else:
            reso = NPY_FR_ns
        return convert_datetime_to_tsobject(ts, tz, nanos, reso=reso)
    elif PyDate_Check(ts):
        # Keep the converter same as PyDateTime's
        # For date object we give the lowest supported resolution, i.e. "s"
        ts = datetime.combine(ts, time())
        return convert_datetime_to_tsobject(
            ts, tz, nanos=0, reso=NPY_DATETIMEUNIT.NPY_FR_s
        )
    else:
        from .period import Period
        if isinstance(ts, Period):
            raise ValueError("Cannot convert Period to Timestamp "
                             "unambiguously. Use to_timestamp")
        raise TypeError(f"Cannot convert input [{ts}] of type {type(ts)} to "
                        f"Timestamp")

    maybe_localize_tso(obj, tz, obj.creso)
    return obj
cdef maybe_localize_tso(_TSObject obj, tzinfo tz, NPY_DATETIMEUNIT reso):
    # Localize obj in-place to tz (when given), then validate non-NaT values.
    if tz is not None:
        _localize_tso(obj, tz, reso)

    if obj.value != NPY_NAT:
        # check_overflows needs to run after _localize_tso
        check_dts_bounds(&obj.dts, reso)
        check_overflows(obj, reso)
cdef _TSObject convert_datetime_to_tsobject(
    datetime ts,
    tzinfo tz,
    int32_t nanos=0,
    NPY_DATETIMEUNIT reso=NPY_FR_ns,
):
    """
    Convert a datetime (or Timestamp) input `ts`, along with optional timezone
    object `tz` to a _TSObject.

    The optional argument `nanos` allows for cases where datetime input
    needs to be supplemented with higher-precision information.

    Parameters
    ----------
    ts : datetime or Timestamp
        Value to be converted to _TSObject
    tz : tzinfo or None
        timezone for the timezone-aware output
    nanos : int32_t, default is 0
        nanoseconds supplement the precision of the datetime input ts
    reso : NPY_DATETIMEUNIT, default NPY_FR_ns

    Returns
    -------
    obj : _TSObject
    """
    cdef:
        _TSObject obj = _TSObject()
        int64_t pps

    obj.creso = reso
    obj.fold = ts.fold
    if tz is not None:
        if ts.tzinfo is not None:
            # Convert the current timezone to the passed timezone
            ts = ts.astimezone(tz)
            pydatetime_to_dtstruct(ts, &obj.dts)
            obj.tzinfo = ts.tzinfo
        elif not is_utc(tz):
            # Naive input localized to a non-UTC timezone.
            ts = _localize_pydatetime(ts, tz)
            pydatetime_to_dtstruct(ts, &obj.dts)
            obj.tzinfo = ts.tzinfo
        else:
            # UTC
            pydatetime_to_dtstruct(ts, &obj.dts)
            obj.tzinfo = tz
    else:
        pydatetime_to_dtstruct(ts, &obj.dts)
        obj.tzinfo = ts.tzinfo

    if isinstance(ts, ABCTimestamp):
        # Timestamp carries nanoseconds beyond datetime's microseconds;
        # store them in the struct's picosecond field.
        obj.dts.ps = ts.nanosecond * 1000

    if nanos:
        # Explicit nanos override any nanoseconds taken from ts above.
        obj.dts.ps = nanos * 1000

    obj.value = npy_datetimestruct_to_datetime(reso, &obj.dts)

    if obj.tzinfo is not None and not is_utc(obj.tzinfo):
        # Shift the wall-clock value back to UTC.
        offset = get_utcoffset(obj.tzinfo, ts)
        pps = periods_per_second(reso)
        obj.value -= int(offset.total_seconds() * pps)

    check_dts_bounds(&obj.dts, reso)
    check_overflows(obj, reso)
    return obj
cdef _TSObject _create_tsobject_tz_using_offset(npy_datetimestruct dts,
                                                int tzoffset, tzinfo tz=None,
                                                NPY_DATETIMEUNIT reso=NPY_FR_ns):
    """
    Convert a datetimestruct `dts`, along with initial timezone offset
    `tzoffset` to a _TSObject (with timezone object `tz` - optional).

    Parameters
    ----------
    dts : npy_datetimestruct
    tzoffset : int
        UTC offset in minutes (see the timedelta construction below).
    tz : tzinfo or None
        timezone for the timezone-aware output.
    reso : NPY_DATETIMEUNIT, default NPY_FR_ns

    Returns
    -------
    obj : _TSObject
    """
    cdef:
        _TSObject obj = _TSObject()
        int64_t value  # numpy dt64
        datetime dt
        Py_ssize_t pos

    value = npy_datetimestruct_to_datetime(reso, &dts)
    obj.dts = dts
    # Fixed-offset tzinfo built from the parsed offset.
    obj.tzinfo = timezone(timedelta(minutes=tzoffset))
    obj.value = tz_localize_to_utc_single(
        value, obj.tzinfo, ambiguous=None, nonexistent=None, creso=reso
    )
    obj.creso = reso
    if tz is None:
        check_overflows(obj, reso)
        return obj

    cdef:
        Localizer info = Localizer(tz, reso)

    # Infer fold from offset-adjusted obj.value
    # see PEP 495 https://www.python.org/dev/peps/pep-0495/#the-fold-attribute
    if info.use_utc:
        pass
    elif info.use_tzlocal:
        info.utc_val_to_local_val(obj.value, &pos, &obj.fold)
    elif info.use_dst and not info.use_pytz:
        # i.e. dateutil
        info.utc_val_to_local_val(obj.value, &pos, &obj.fold)

    # Keep the converter same as PyDateTime's
    dt = datetime(obj.dts.year, obj.dts.month, obj.dts.day,
                  obj.dts.hour, obj.dts.min, obj.dts.sec,
                  obj.dts.us, obj.tzinfo, fold=obj.fold)
    obj = convert_datetime_to_tsobject(
        dt, tz, nanos=obj.dts.ps // 1000)
    obj.ensure_reso(reso)  # TODO: more performant to get reso right up front?
    return obj
cdef _TSObject convert_str_to_tsobject(str ts, tzinfo tz, str unit,
                                       bint dayfirst=False,
                                       bint yearfirst=False):
    """
    Convert a string input `ts`, along with optional timezone object`tz`
    to a _TSObject.

    The optional arguments `dayfirst` and `yearfirst` are passed to the
    dateutil parser.

    Parameters
    ----------
    ts : str
        Value to be converted to _TSObject
    tz : tzinfo or None
        timezone for the timezone-aware output
    unit : str or None
    dayfirst : bool, default False
        When parsing an ambiguous date string, interpret e.g. "3/4/1975" as
        April 3, as opposed to the standard US interpretation March 4.
    yearfirst : bool, default False
        When parsing an ambiguous date string, interpret e.g. "01/05/09"
        as "May 9, 2001", as opposed to the default "Jan 5, 2009"

    Returns
    -------
    obj : _TSObject
    """
    cdef:
        npy_datetimestruct dts
        int out_local = 0, out_tzoffset = 0, string_to_dts_failed
        datetime dt
        int64_t ival
        NPY_DATETIMEUNIT out_bestunit, reso

    if len(ts) == 0 or ts in nat_strings:
        # Empty strings and NaT aliases map to NaT.
        obj = _TSObject()
        obj.value = NPY_NAT
        obj.tzinfo = tz
        return obj
    elif ts == "now":
        # Issue 9000, we short-circuit rather than going
        #  into np_datetime_strings which returns utc
        dt = datetime.now(tz)
    elif ts == "today":
        # Issue 9000, we short-circuit rather than going
        #  into np_datetime_strings which returns a normalized datetime
        dt = datetime.now(tz)
        # equiv: datetime.today().replace(tzinfo=tz)
    else:
        # First attempt the fast ISO-8601 parser.
        string_to_dts_failed = string_to_dts(
            ts, &dts, &out_bestunit, &out_local,
            &out_tzoffset, False
        )
        if not string_to_dts_failed:
            reso = get_supported_reso(out_bestunit)
            check_dts_bounds(&dts, reso)
            if out_local == 1:
                # The string carried its own UTC offset.
                return _create_tsobject_tz_using_offset(
                    dts, out_tzoffset, tz, reso
                )
            else:
                ival = npy_datetimestruct_to_datetime(reso, &dts)
                if tz is not None:
                    # shift for _localize_tso
                    ival = tz_localize_to_utc_single(
                        ival, tz, ambiguous="raise", nonexistent=None, creso=reso
                    )
                obj = _TSObject()
                obj.dts = dts
                obj.value = ival
                obj.creso = reso
                maybe_localize_tso(obj, tz, obj.creso)
                return obj

        # Fall back to the flexible dateutil-based parser.
        dt = parse_datetime_string(
            ts, dayfirst=dayfirst, yearfirst=yearfirst, out_bestunit=&out_bestunit
        )
        reso = get_supported_reso(out_bestunit)
        return convert_datetime_to_tsobject(dt, tz, nanos=0, reso=reso)

    return convert_datetime_to_tsobject(dt, tz)
cdef check_overflows(_TSObject obj, NPY_DATETIMEUNIT reso=NPY_FR_ns):
    """
    Check that we haven't silently overflowed in timezone conversion

    Parameters
    ----------
    obj : _TSObject
    reso : NPY_DATETIMEUNIT, default NPY_FR_ns

    Returns
    -------
    None

    Raises
    ------
    OutOfBoundsDatetime
    """
    # GH#12677
    cdef:
        npy_datetimestruct lb, ub

    get_implementation_bounds(reso, &lb, &ub)

    if obj.dts.year == lb.year:
        # In the lower-bound year the stored value must be negative; a
        # non-negative value means the timezone shift wrapped around.
        if not (obj.value < 0):
            from pandas._libs.tslibs.timestamps import Timestamp
            fmt = (f"{obj.dts.year}-{obj.dts.month:02d}-{obj.dts.day:02d} "
                   f"{obj.dts.hour:02d}:{obj.dts.min:02d}:{obj.dts.sec:02d}")
            raise OutOfBoundsDatetime(
                f"Converting {fmt} underflows past {Timestamp.min}"
            )
    elif obj.dts.year == ub.year:
        # Mirror-image check at the upper bound.
        if not (obj.value > 0):
            from pandas._libs.tslibs.timestamps import Timestamp
            fmt = (f"{obj.dts.year}-{obj.dts.month:02d}-{obj.dts.day:02d} "
                   f"{obj.dts.hour:02d}:{obj.dts.min:02d}:{obj.dts.sec:02d}")
            raise OutOfBoundsDatetime(
                f"Converting {fmt} overflows past {Timestamp.max}"
            )
# ----------------------------------------------------------------------
# Localization
cdef void _localize_tso(_TSObject obj, tzinfo tz, NPY_DATETIMEUNIT reso) noexcept:
    """
    Given the UTC nanosecond timestamp in obj.value, find the wall-clock
    representation of that timestamp in the given timezone.

    Parameters
    ----------
    obj : _TSObject
    tz : tzinfo
    reso : NPY_DATETIMEUNIT

    Returns
    -------
    None

    Notes
    -----
    Sets obj.tzinfo inplace, alters obj.dts inplace.
    """
    cdef:
        int64_t local_val
        Py_ssize_t outpos = -1
        Localizer info = Localizer(tz, reso)

    assert obj.tzinfo is None

    if info.use_utc:
        # Already UTC: nothing to shift.
        pass
    elif obj.value == NPY_NAT:
        pass
    else:
        local_val = info.utc_val_to_local_val(obj.value, &outpos, &obj.fold)

        if info.use_pytz:
            # infer we went through a pytz path, will have outpos!=-1
            tz = tz._tzinfos[tz._transition_info[outpos]]

        pandas_datetime_to_datetimestruct(local_val, reso, &obj.dts)

    obj.tzinfo = tz
cdef datetime _localize_pydatetime(datetime dt, tzinfo tz):
    """
    Take a datetime/Timestamp in UTC and localizes to timezone tz.

    NB: Unlike the public version, this treats datetime and Timestamp objects
        identically, i.e. discards nanos from Timestamps.
        It also assumes that the `tz` input is not None.
    """
    try:
        # datetime.replace with a pytz tzinfo may give an incorrect result,
        # so prefer the tz's own localize() when it exists.
        result = tz.localize(dt)
    except AttributeError:
        # Non-pytz tzinfo: attaching directly is correct.
        result = dt.replace(tzinfo=tz)
    return result
cpdef inline datetime localize_pydatetime(datetime dt, tzinfo tz):
    """
    Take a datetime/Timestamp in UTC and localizes to timezone tz.

    Parameters
    ----------
    dt : datetime or Timestamp
    tz : tzinfo or None

    Returns
    -------
    localized : datetime or Timestamp
    """
    if tz is None:
        # Nothing to attach; return the input unchanged.
        return dt
    if isinstance(dt, ABCTimestamp):
        # Timestamp has its own localization path (see _localize_pydatetime,
        # which would discard its nanoseconds).
        return dt.tz_localize(tz)
    return _localize_pydatetime(dt, tz)
cdef tzinfo convert_timezone(
    tzinfo tz_in,
    tzinfo tz_out,
    bint found_naive,
    bint found_tz,
    bint utc_convert,
):
    """
    Validate that ``tz_in`` can be converted/localized to ``tz_out``.

    Parameters
    ----------
    tz_in : tzinfo or None
        Timezone info of element being processed.
    tz_out : tzinfo or None
        Timezone info of output.
    found_naive : bool
        Whether a timezone-naive element has been found so far.
    found_tz : bool
        Whether a timezone-aware element has been found so far.
    utc_convert : bool
        Whether to convert/localize to UTC.

    Returns
    -------
    tz_info
        Timezone info of output.

    Raises
    ------
    ValueError
        If ``tz_in`` can't be converted/localized to ``tz_out``.
    """
    if tz_in is None:
        # Naive element: only an error when mixed with aware elements
        # and we are not normalizing everything to UTC.
        if found_tz and not utc_convert:
            raise ValueError("Cannot mix tz-aware with "
                             "tz-naive values")
        return tz_out

    # Aware element from here on.
    if utc_convert:
        return tz_out
    if found_naive:
        raise ValueError("Tz-aware datetime.datetime "
                         "cannot be converted to "
                         "datetime64 unless utc=True")
    if tz_out is not None and not tz_compare(tz_out, tz_in):
        raise ValueError("Tz-aware datetime.datetime "
                         "cannot be converted to "
                         "datetime64 unless utc=True")
    # First (or equivalent) timezone seen becomes the output timezone.
    return tz_in
cdef int64_t parse_pydatetime(
    datetime val,
    npy_datetimestruct *dts,
    bint utc_convert,
) except? -1:
    """
    Convert pydatetime to datetime64.

    Parameters
    ----------
    val : datetime
        Element being processed.
    dts : *npy_datetimestruct
        Needed to use in pydatetime_to_dt64, which writes to it.
    utc_convert : bool
        Whether to convert/localize to UTC.  Retained for API compatibility;
        the aware-datetime paths for both values were identical, so the
        redundant branch has been collapsed (see note below).

    Returns
    -------
    int64_t
        dt64[ns] representation of ``val``.

    Raises
    ------
    OutOfBoundsDatetime
    """
    cdef:
        _TSObject _ts
        int64_t result

    if val.tzinfo is not None:
        # NOTE: the utc_convert=True and utc_convert=False branches here were
        # byte-identical, so the branch on utc_convert was removed.
        _ts = convert_datetime_to_tsobject(val, None)
        _ts.ensure_reso(NPY_FR_ns)
        result = _ts.value
    else:
        if isinstance(val, ABCTimestamp):
            # Timestamp already stores an i8 value; just normalize the unit.
            result = val.as_unit("ns")._value
        else:
            result = pydatetime_to_dt64(val, dts)
            check_dts_bounds(dts)
    return result

View File

@@ -0,0 +1,109 @@
from numpy cimport int64_t
from pandas._libs.tslibs.np_datetime cimport NPY_DATETIMEUNIT
# Unit/abbreviation conversion helpers implemented in dtypes.pyx.
cpdef str npy_unit_to_abbrev(NPY_DATETIMEUNIT unit)
cpdef NPY_DATETIMEUNIT abbrev_to_npy_unit(str abbrev)
cdef NPY_DATETIMEUNIT freq_group_code_to_npy_unit(int freq) noexcept nogil
cpdef int64_t periods_per_day(NPY_DATETIMEUNIT reso=*) except? -1
cpdef int64_t periods_per_second(NPY_DATETIMEUNIT reso) except? -1
cpdef NPY_DATETIMEUNIT get_supported_reso(NPY_DATETIMEUNIT reso)
cpdef bint is_supported_unit(NPY_DATETIMEUNIT reso)

# Module-level lookup dicts defined in dtypes.pyx, shared via cimport.
cdef dict attrname_to_abbrevs
cdef dict npy_unit_to_attrname
cdef dict attrname_to_npy_unit
# Frequency-group codes; the thousands digit identifies the group.
cdef enum c_FreqGroup:
    # Mirrors FreqGroup in the .pyx file
    FR_ANN = 1000
    FR_QTR = 2000
    FR_MTH = 3000
    FR_WK = 4000
    FR_BUS = 5000
    FR_DAY = 6000
    FR_HR = 7000
    FR_MIN = 8000
    FR_SEC = 9000
    FR_MS = 10000
    FR_US = 11000
    FR_NS = 12000
    FR_UND = -10000  # undefined
# Resolution codes, ordered from finest (ns) to coarsest (year).
cdef enum c_Resolution:
    # Mirrors Resolution in the .pyx file
    RESO_NS = 0
    RESO_US = 1
    RESO_MS = 2
    RESO_SEC = 3
    RESO_MIN = 4
    RESO_HR = 5
    RESO_DAY = 6
    RESO_MTH = 7
    RESO_QTR = 8
    RESO_YR = 9
# Per-frequency dtype codes; thousands digit matches c_FreqGroup, the
# remainder encodes the fiscal year end / day-of-week variant.
cdef enum PeriodDtypeCode:
    # Annual freqs with various fiscal year ends.
    # eg, 2005 for A_FEB runs Mar 1, 2004 to Feb 28, 2005
    A = 1000      # Default alias
    A_DEC = 1000  # Annual - December year end
    A_JAN = 1001  # Annual - January year end
    A_FEB = 1002  # Annual - February year end
    A_MAR = 1003  # Annual - March year end
    A_APR = 1004  # Annual - April year end
    A_MAY = 1005  # Annual - May year end
    A_JUN = 1006  # Annual - June year end
    A_JUL = 1007  # Annual - July year end
    A_AUG = 1008  # Annual - August year end
    A_SEP = 1009  # Annual - September year end
    A_OCT = 1010  # Annual - October year end
    A_NOV = 1011  # Annual - November year end

    # Quarterly frequencies with various fiscal year ends.
    # eg, Q42005 for Q_OCT runs Aug 1, 2005 to Oct 31, 2005
    Q_DEC = 2000    # Quarterly - December year end
    Q_JAN = 2001    # Quarterly - January year end
    Q_FEB = 2002    # Quarterly - February year end
    Q_MAR = 2003    # Quarterly - March year end
    Q_APR = 2004    # Quarterly - April year end
    Q_MAY = 2005    # Quarterly - May year end
    Q_JUN = 2006    # Quarterly - June year end
    Q_JUL = 2007    # Quarterly - July year end
    Q_AUG = 2008    # Quarterly - August year end
    Q_SEP = 2009    # Quarterly - September year end
    Q_OCT = 2010    # Quarterly - October year end
    Q_NOV = 2011    # Quarterly - November year end

    M = 3000        # Monthly

    W_SUN = 4000    # Weekly - Sunday end of week
    W_MON = 4001    # Weekly - Monday end of week
    W_TUE = 4002    # Weekly - Tuesday end of week
    W_WED = 4003    # Weekly - Wednesday end of week
    W_THU = 4004    # Weekly - Thursday end of week
    W_FRI = 4005    # Weekly - Friday end of week
    W_SAT = 4006    # Weekly - Saturday end of week

    B = 5000        # Business days
    D = 6000        # Daily
    H = 7000        # Hourly
    T = 8000        # Minutely
    S = 9000        # Secondly
    L = 10000       # Millisecondly
    U = 11000       # Microsecondly
    N = 12000       # Nanosecondly

    UNDEFINED = -10_000
cdef class PeriodDtypeBase:
    cdef readonly:
        # PeriodDtypeCode identifying the frequency of this dtype.
        PeriodDtypeCode _dtype_code
        # NOTE(review): presumably the frequency multiplier (e.g. the 3 in
        # a "3M" period) — confirm against dtypes.pyx.
        int64_t _n

    cpdef int _get_to_timestamp_base(self)
    cpdef bint _is_tick_like(self)

View File

@@ -0,0 +1,88 @@
from enum import Enum
# These are not public API, but are exposed in the .pyi file because they
# are imported in tests.
_attrname_to_abbrevs: dict[str, str]
_period_code_map: dict[str, int]

# Conversion helpers between NPY_DATETIMEUNIT integer codes, period
# resolutions, and unit abbreviation strings (implemented in dtypes.pyx).
def periods_per_day(reso: int) -> int: ...
def periods_per_second(reso: int) -> int: ...
def is_supported_unit(reso: int) -> bool: ...
def npy_unit_to_abbrev(reso: int) -> str: ...
def get_supported_reso(reso: int) -> int: ...
def abbrev_to_npy_unit(abbrev: str) -> int: ...
class PeriodDtypeBase:
    # Stub for the Cython extension type in dtypes.pyx.
    _dtype_code: int  # PeriodDtypeCode
    _n: int

    # actually __cinit__
    def __new__(cls, code: int, n: int): ...
    @property
    def _freq_group_code(self) -> int: ...
    @property
    def _resolution_obj(self) -> Resolution: ...
    def _get_to_timestamp_base(self) -> int: ...
    @property
    def _freqstr(self) -> str: ...
    def __hash__(self) -> int: ...
    def _is_tick_like(self) -> bool: ...
    @property
    def _creso(self) -> int: ...
    @property
    def _td64_unit(self) -> str: ...
class FreqGroup(Enum):
    # Stub mirroring the frequency-group enum defined in dtypes.pyx.
    FR_ANN: int
    FR_QTR: int
    FR_MTH: int
    FR_WK: int
    FR_BUS: int
    FR_DAY: int
    FR_HR: int
    FR_MIN: int
    FR_SEC: int
    FR_MS: int
    FR_US: int
    FR_NS: int
    FR_UND: int
    @staticmethod
    def from_period_dtype_code(code: int) -> FreqGroup: ...
class Resolution(Enum):
    # Stub mirroring the resolution enum in dtypes.pyx; members are ordered
    # from finest (nanosecond) to coarsest (year) by their integer values.
    RESO_NS: int
    RESO_US: int
    RESO_MS: int
    RESO_SEC: int
    RESO_MIN: int
    RESO_HR: int
    RESO_DAY: int
    RESO_MTH: int
    RESO_QTR: int
    RESO_YR: int
    def __lt__(self, other: Resolution) -> bool: ...
    def __ge__(self, other: Resolution) -> bool: ...
    @property
    def attrname(self) -> str: ...
    @classmethod
    def from_attrname(cls, attrname: str) -> Resolution: ...
    @classmethod
    def get_reso_from_freqstr(cls, freq: str) -> Resolution: ...
    @property
    def attr_abbrev(self) -> str: ...
class NpyDatetimeUnit(Enum):
    # Python-space mirror of numpy's NPY_DATETIMEUNIT C enum.
    NPY_FR_Y: int
    NPY_FR_M: int
    NPY_FR_W: int
    NPY_FR_D: int
    NPY_FR_h: int
    NPY_FR_m: int
    NPY_FR_s: int
    NPY_FR_ms: int
    NPY_FR_us: int
    NPY_FR_ns: int
    NPY_FR_ps: int
    NPY_FR_fs: int
    NPY_FR_as: int
    NPY_FR_GENERIC: int

View File

@@ -0,0 +1,468 @@
# period frequency constants corresponding to scikits timeseries
# originals
from enum import Enum
from pandas._libs.tslibs.np_datetime cimport (
NPY_DATETIMEUNIT,
get_conversion_factor,
import_pandas_datetime,
)
import_pandas_datetime()  # initialize the pandas C datetime API before any use below
cdef class PeriodDtypeBase:
    """
    Similar to an actual dtype, this contains all of the information
    describing a PeriodDtype in an integer code.
    """
    # cdef readonly:
    #    PeriodDtypeCode _dtype_code
    #    int64_t _n

    def __cinit__(self, PeriodDtypeCode code, int64_t n):
        self._dtype_code = code
        self._n = n

    def __eq__(self, other):
        # Equal iff both code and multiplier match.
        if not isinstance(other, PeriodDtypeBase):
            return False
        if not isinstance(self, PeriodDtypeBase):
            # cython semantics, this is a reversed op
            return False
        return self._dtype_code == other._dtype_code and self._n == other._n

    def __hash__(self) -> int:
        # Consistent with __eq__: hash on (n, code).
        return hash((self._n, self._dtype_code))

    @property
    def _freq_group_code(self) -> int:
        # See also: libperiod.get_freq_group
        # Truncate to the thousands place, e.g. Q_JAN (2001) -> 2000.
        return (self._dtype_code // 1000) * 1000

    @property
    def _resolution_obj(self) -> "Resolution":
        # Map this dtype's frequency group to a Resolution enum member.
        fgc = self._freq_group_code
        freq_group = FreqGroup(fgc)
        abbrev = _reverse_period_code_map[freq_group.value].split("-")[0]
        if abbrev == "B":
            # Business-day frequency resolves to daily resolution.
            return Resolution.RESO_DAY
        attrname = _abbrev_to_attrnames[abbrev]
        return Resolution.from_attrname(attrname)

    @property
    def _freqstr(self) -> str:
        # Will be passed to to_offset in Period._maybe_convert_freq
        out = _reverse_period_code_map.get(self._dtype_code)
        if self._n == 1:
            return out
        # Prefix multiplier for n != 1, e.g. "3D".
        return str(self._n) + out

    cpdef int _get_to_timestamp_base(self):
        """
        Return frequency code group used for base of to_timestamp against
        frequency code.

        Return day freq code against longer freq than day.
        Return second freq code against hour between second.

        Returns
        -------
        int
        """
        base = <c_FreqGroup>self._dtype_code
        if base < FR_BUS:
            return FR_DAY
        elif FR_HR <= base <= FR_SEC:
            return FR_SEC
        return base

    cpdef bint _is_tick_like(self):
        # True for daily-or-finer frequencies (D, H, T, S, L, U, N).
        return self._dtype_code >= PeriodDtypeCode.D

    @property
    def _creso(self) -> int:
        # NPY_DATETIMEUNIT matching this dtype; raises KeyError for
        # frequencies coarser than daily (no tick-like unit exists).
        return {
            PeriodDtypeCode.D: NPY_DATETIMEUNIT.NPY_FR_D,
            PeriodDtypeCode.H: NPY_DATETIMEUNIT.NPY_FR_h,
            PeriodDtypeCode.T: NPY_DATETIMEUNIT.NPY_FR_m,
            PeriodDtypeCode.S: NPY_DATETIMEUNIT.NPY_FR_s,
            PeriodDtypeCode.L: NPY_DATETIMEUNIT.NPY_FR_ms,
            PeriodDtypeCode.U: NPY_DATETIMEUNIT.NPY_FR_us,
            PeriodDtypeCode.N: NPY_DATETIMEUNIT.NPY_FR_ns,
        }[self._dtype_code]

    @property
    def _td64_unit(self) -> str:
        # timedelta64 unit abbreviation, e.g. "ns".
        return npy_unit_to_abbrev(self._creso)
# Mapping from frequency string (as used in user-facing freq aliases)
# to PeriodDtypeCode.
_period_code_map = {
    # Annual freqs with various fiscal year ends.
    # eg, 2005 for A-FEB runs Mar 1, 2004 to Feb 28, 2005
    "A-DEC": PeriodDtypeCode.A_DEC,  # Annual - December year end
    "A-JAN": PeriodDtypeCode.A_JAN,  # Annual - January year end
    "A-FEB": PeriodDtypeCode.A_FEB,  # Annual - February year end
    "A-MAR": PeriodDtypeCode.A_MAR,  # Annual - March year end
    "A-APR": PeriodDtypeCode.A_APR,  # Annual - April year end
    "A-MAY": PeriodDtypeCode.A_MAY,  # Annual - May year end
    "A-JUN": PeriodDtypeCode.A_JUN,  # Annual - June year end
    "A-JUL": PeriodDtypeCode.A_JUL,  # Annual - July year end
    "A-AUG": PeriodDtypeCode.A_AUG,  # Annual - August year end
    "A-SEP": PeriodDtypeCode.A_SEP,  # Annual - September year end
    "A-OCT": PeriodDtypeCode.A_OCT,  # Annual - October year end
    "A-NOV": PeriodDtypeCode.A_NOV,  # Annual - November year end

    # Quarterly frequencies with various fiscal year ends.
    # eg, Q42005 for Q-OCT runs Aug 1, 2005 to Oct 31, 2005
    "Q-DEC": PeriodDtypeCode.Q_DEC,  # Quarterly - December year end
    "Q-JAN": PeriodDtypeCode.Q_JAN,  # Quarterly - January year end
    "Q-FEB": PeriodDtypeCode.Q_FEB,  # Quarterly - February year end
    "Q-MAR": PeriodDtypeCode.Q_MAR,  # Quarterly - March year end
    "Q-APR": PeriodDtypeCode.Q_APR,  # Quarterly - April year end
    "Q-MAY": PeriodDtypeCode.Q_MAY,  # Quarterly - May year end
    "Q-JUN": PeriodDtypeCode.Q_JUN,  # Quarterly - June year end
    "Q-JUL": PeriodDtypeCode.Q_JUL,  # Quarterly - July year end
    "Q-AUG": PeriodDtypeCode.Q_AUG,  # Quarterly - August year end
    "Q-SEP": PeriodDtypeCode.Q_SEP,  # Quarterly - September year end
    "Q-OCT": PeriodDtypeCode.Q_OCT,  # Quarterly - October year end
    "Q-NOV": PeriodDtypeCode.Q_NOV,  # Quarterly - November year end

    "M": PeriodDtypeCode.M,        # Monthly

    "W-SUN": PeriodDtypeCode.W_SUN,  # Weekly - Sunday end of week
    "W-MON": PeriodDtypeCode.W_MON,  # Weekly - Monday end of week
    "W-TUE": PeriodDtypeCode.W_TUE,  # Weekly - Tuesday end of week
    "W-WED": PeriodDtypeCode.W_WED,  # Weekly - Wednesday end of week
    "W-THU": PeriodDtypeCode.W_THU,  # Weekly - Thursday end of week
    "W-FRI": PeriodDtypeCode.W_FRI,  # Weekly - Friday end of week
    "W-SAT": PeriodDtypeCode.W_SAT,  # Weekly - Saturday end of week

    "B": PeriodDtypeCode.B,        # Business days
    "D": PeriodDtypeCode.D,        # Daily
    "H": PeriodDtypeCode.H,        # Hourly
    "T": PeriodDtypeCode.T,        # Minutely
    "S": PeriodDtypeCode.S,        # Secondly
    "L": PeriodDtypeCode.L,        # Millisecondly
    "U": PeriodDtypeCode.U,        # Microsecondly
    "N": PeriodDtypeCode.N,        # Nanosecondly
}
# Inverted map: PeriodDtypeCode -> canonical frequency string.  Built before
# the alias entries below so each code maps to its canonical name only.
_reverse_period_code_map = {
    _period_code_map[key]: key for key in _period_code_map}

# Yearly aliases; careful not to put these in _reverse_period_code_map
_period_code_map.update({"Y" + key[1:]: _period_code_map[key]
                         for key in _period_code_map
                         if key.startswith("A-")})

# Shorthand aliases without a fiscal-year suffix.
_period_code_map.update({
    "Q": 2000,   # Quarterly - December year end (default quarterly)
    "A": PeriodDtypeCode.A,   # Annual
    "W": 4000,   # Weekly
    "C": 5000,   # Custom Business Day
})

# Month-abbreviation suffixes valid in annual/quarterly freq strings
# (e.g. the "DEC" in "A-DEC").
cdef set _month_names = {
    x.split("-")[-1] for x in _period_code_map.keys() if x.startswith("A-")
}
# Map attribute-name resolutions to resolution abbreviations
_attrname_to_abbrevs = {
    "year": "A",
    "quarter": "Q",
    "month": "M",
    "day": "D",
    "hour": "H",
    "minute": "T",
    "second": "S",
    "millisecond": "L",
    "microsecond": "U",
    "nanosecond": "N",
}
# Typed cdef view of the same dict for fast C-level access.
cdef dict attrname_to_abbrevs = _attrname_to_abbrevs
# Inverse map: abbreviation -> attribute name.
cdef dict _abbrev_to_attrnames = {v: k for k, v in attrname_to_abbrevs.items()}
class FreqGroup(Enum):
    # Mirrors c_FreqGroup in the .pxd file
    FR_ANN = c_FreqGroup.FR_ANN
    FR_QTR = c_FreqGroup.FR_QTR
    FR_MTH = c_FreqGroup.FR_MTH
    FR_WK = c_FreqGroup.FR_WK
    FR_BUS = c_FreqGroup.FR_BUS
    FR_DAY = c_FreqGroup.FR_DAY
    FR_HR = c_FreqGroup.FR_HR
    FR_MIN = c_FreqGroup.FR_MIN
    FR_SEC = c_FreqGroup.FR_SEC
    FR_MS = c_FreqGroup.FR_MS
    FR_US = c_FreqGroup.FR_US
    FR_NS = c_FreqGroup.FR_NS
    FR_UND = c_FreqGroup.FR_UND  # undefined

    @staticmethod
    def from_period_dtype_code(code: int) -> "FreqGroup":
        # See also: PeriodDtypeBase._freq_group_code
        # Truncate to the thousands place to get the group code.
        code = (code // 1000) * 1000
        return FreqGroup(code)
class Resolution(Enum):
    # Datetime resolution, ordered from finest (ns) to coarsest (year);
    # values mirror c_Resolution in the .pxd file.
    RESO_NS = c_Resolution.RESO_NS
    RESO_US = c_Resolution.RESO_US
    RESO_MS = c_Resolution.RESO_MS
    RESO_SEC = c_Resolution.RESO_SEC
    RESO_MIN = c_Resolution.RESO_MIN
    RESO_HR = c_Resolution.RESO_HR
    RESO_DAY = c_Resolution.RESO_DAY
    RESO_MTH = c_Resolution.RESO_MTH
    RESO_QTR = c_Resolution.RESO_QTR
    RESO_YR = c_Resolution.RESO_YR

    def __lt__(self, other):
        # Order by the underlying integer resolution code.
        return self.value < other.value

    def __ge__(self, other):
        return self.value >= other.value

    @property
    def attr_abbrev(self) -> str:
        # string that we can pass to to_offset
        return _attrname_to_abbrevs[self.attrname]

    @property
    def attrname(self) -> str:
        """
        Return datetime attribute name corresponding to this Resolution.

        Examples
        --------
        >>> Resolution.RESO_SEC.attrname
        'second'
        """
        return _reso_str_map[self.value]

    @classmethod
    def from_attrname(cls, attrname: str) -> "Resolution":
        """
        Return resolution str against resolution code.

        Examples
        --------
        >>> Resolution.from_attrname('second')
        <Resolution.RESO_SEC: 3>

        >>> Resolution.from_attrname('second') == Resolution.RESO_SEC
        True
        """
        return cls(_str_reso_map[attrname])

    @classmethod
    def get_reso_from_freqstr(cls, freq: str) -> "Resolution":
        """
        Return resolution code against frequency str.

        `freq` is given by the `offset.freqstr` for some DateOffset object.

        Examples
        --------
        >>> Resolution.get_reso_from_freqstr('H')
        <Resolution.RESO_HR: 5>

        >>> Resolution.get_reso_from_freqstr('H') == Resolution.RESO_HR
        True
        """
        try:
            attr_name = _abbrev_to_attrnames[freq]
        except KeyError:
            # For quarterly and yearly resolutions, we need to chop off
            # a month string.
            split_freq = freq.split("-")
            if len(split_freq) != 2:
                raise
            if split_freq[1] not in _month_names:
                # i.e. we want e.g. "Q-DEC", not "Q-INVALID"
                raise
            attr_name = _abbrev_to_attrnames[split_freq[0]]
        return cls.from_attrname(attr_name)
class NpyDatetimeUnit(Enum):
    """
    Python-space analogue to NPY_DATETIMEUNIT.
    """
    NPY_FR_Y = NPY_DATETIMEUNIT.NPY_FR_Y
    NPY_FR_M = NPY_DATETIMEUNIT.NPY_FR_M
    NPY_FR_W = NPY_DATETIMEUNIT.NPY_FR_W
    NPY_FR_D = NPY_DATETIMEUNIT.NPY_FR_D
    NPY_FR_h = NPY_DATETIMEUNIT.NPY_FR_h
    NPY_FR_m = NPY_DATETIMEUNIT.NPY_FR_m
    NPY_FR_s = NPY_DATETIMEUNIT.NPY_FR_s
    NPY_FR_ms = NPY_DATETIMEUNIT.NPY_FR_ms
    NPY_FR_us = NPY_DATETIMEUNIT.NPY_FR_us
    NPY_FR_ns = NPY_DATETIMEUNIT.NPY_FR_ns
    NPY_FR_ps = NPY_DATETIMEUNIT.NPY_FR_ps
    NPY_FR_fs = NPY_DATETIMEUNIT.NPY_FR_fs
    NPY_FR_as = NPY_DATETIMEUNIT.NPY_FR_as
    NPY_FR_GENERIC = NPY_DATETIMEUNIT.NPY_FR_GENERIC
cpdef NPY_DATETIMEUNIT get_supported_reso(NPY_DATETIMEUNIT reso):
    # If we have an unsupported reso, return the nearest supported reso.
    # Supported resos are s, ms, us, ns; coarser units clamp up to "s",
    # finer units clamp down to "ns".
    if reso == NPY_DATETIMEUNIT.NPY_FR_GENERIC:
        # TODO: or raise ValueError? trying this gives unraisable errors, but
        #  "except? -1" breaks at compile-time for unknown reasons
        return NPY_DATETIMEUNIT.NPY_FR_ns
    if reso < NPY_DATETIMEUNIT.NPY_FR_s:
        return NPY_DATETIMEUNIT.NPY_FR_s
    elif reso > NPY_DATETIMEUNIT.NPY_FR_ns:
        return NPY_DATETIMEUNIT.NPY_FR_ns
    return reso
cpdef bint is_supported_unit(NPY_DATETIMEUNIT reso):
    # True only for the four resolutions pandas supports natively: s/ms/us/ns.
    return (
        reso == NPY_DATETIMEUNIT.NPY_FR_ns
        or reso == NPY_DATETIMEUNIT.NPY_FR_us
        or reso == NPY_DATETIMEUNIT.NPY_FR_ms
        or reso == NPY_DATETIMEUNIT.NPY_FR_s
    )
cpdef str npy_unit_to_abbrev(NPY_DATETIMEUNIT unit):
    # Convert a NPY_DATETIMEUNIT code to its numpy abbreviation, e.g. "ns".
    # Raises NotImplementedError for unit codes with no abbreviation.
    if unit == NPY_DATETIMEUNIT.NPY_FR_ns or unit == NPY_DATETIMEUNIT.NPY_FR_GENERIC:
        # generic -> default to nanoseconds
        return "ns"
    elif unit == NPY_DATETIMEUNIT.NPY_FR_us:
        return "us"
    elif unit == NPY_DATETIMEUNIT.NPY_FR_ms:
        return "ms"
    elif unit == NPY_DATETIMEUNIT.NPY_FR_s:
        return "s"
    elif unit == NPY_DATETIMEUNIT.NPY_FR_m:
        return "m"
    elif unit == NPY_DATETIMEUNIT.NPY_FR_h:
        return "h"
    elif unit == NPY_DATETIMEUNIT.NPY_FR_D:
        return "D"
    elif unit == NPY_DATETIMEUNIT.NPY_FR_W:
        return "W"
    elif unit == NPY_DATETIMEUNIT.NPY_FR_M:
        return "M"
    elif unit == NPY_DATETIMEUNIT.NPY_FR_Y:
        return "Y"

    # Checks for not-really-supported units go at the end, as we don't expect
    #  to see these often
    elif unit == NPY_DATETIMEUNIT.NPY_FR_ps:
        return "ps"
    elif unit == NPY_DATETIMEUNIT.NPY_FR_fs:
        return "fs"
    elif unit == NPY_DATETIMEUNIT.NPY_FR_as:
        return "as"

    else:
        raise NotImplementedError(unit)
cpdef NPY_DATETIMEUNIT abbrev_to_npy_unit(str abbrev):
    # Inverse of npy_unit_to_abbrev: abbreviation string -> NPY_DATETIMEUNIT.
    # None maps to the GENERIC unit; unknown strings raise ValueError.
    if abbrev == "Y":
        return NPY_DATETIMEUNIT.NPY_FR_Y
    elif abbrev == "M":
        return NPY_DATETIMEUNIT.NPY_FR_M
    elif abbrev == "W":
        return NPY_DATETIMEUNIT.NPY_FR_W
    elif abbrev == "D" or abbrev == "d":
        return NPY_DATETIMEUNIT.NPY_FR_D
    elif abbrev == "h":
        return NPY_DATETIMEUNIT.NPY_FR_h
    elif abbrev == "m":
        return NPY_DATETIMEUNIT.NPY_FR_m
    elif abbrev == "s":
        return NPY_DATETIMEUNIT.NPY_FR_s
    elif abbrev == "ms":
        return NPY_DATETIMEUNIT.NPY_FR_ms
    elif abbrev == "us":
        return NPY_DATETIMEUNIT.NPY_FR_us
    elif abbrev == "ns":
        return NPY_DATETIMEUNIT.NPY_FR_ns
    elif abbrev == "ps":
        return NPY_DATETIMEUNIT.NPY_FR_ps
    elif abbrev == "fs":
        return NPY_DATETIMEUNIT.NPY_FR_fs
    elif abbrev == "as":
        return NPY_DATETIMEUNIT.NPY_FR_as
    elif abbrev is None:
        return NPY_DATETIMEUNIT.NPY_FR_GENERIC
    else:
        raise ValueError(f"Unrecognized unit {abbrev}")
cdef NPY_DATETIMEUNIT freq_group_code_to_npy_unit(int freq) noexcept nogil:
    """
    Convert the freq to the corresponding NPY_DATETIMEUNIT to pass
    to npy_datetimestruct_to_datetime.
    """
    # NOTE(review): no branch handles FR_ANN/FR_QTR/FR_WK/FR_BUS, and being
    # `noexcept nogil` the function cannot raise; for those inputs the return
    # value is whatever falls off the end — presumably callers never pass
    # them. TODO confirm against call sites.
    if freq == FR_MTH:
        return NPY_DATETIMEUNIT.NPY_FR_M
    elif freq == FR_DAY:
        return NPY_DATETIMEUNIT.NPY_FR_D
    elif freq == FR_HR:
        return NPY_DATETIMEUNIT.NPY_FR_h
    elif freq == FR_MIN:
        return NPY_DATETIMEUNIT.NPY_FR_m
    elif freq == FR_SEC:
        return NPY_DATETIMEUNIT.NPY_FR_s
    elif freq == FR_MS:
        return NPY_DATETIMEUNIT.NPY_FR_ms
    elif freq == FR_US:
        return NPY_DATETIMEUNIT.NPY_FR_us
    elif freq == FR_NS:
        return NPY_DATETIMEUNIT.NPY_FR_ns
    elif freq == FR_UND:
        # Default to Day
        return NPY_DATETIMEUNIT.NPY_FR_D
# TODO: use in _matplotlib.converter?
cpdef int64_t periods_per_day(
    NPY_DATETIMEUNIT reso=NPY_DATETIMEUNIT.NPY_FR_ns
) except? -1:
    """
    How many of the given time units fit into a single day?
    """
    return get_conversion_factor(NPY_DATETIMEUNIT.NPY_FR_D, reso)
cpdef int64_t periods_per_second(NPY_DATETIMEUNIT reso) except? -1:
    # How many of the given time units fit into a single second?
    return get_conversion_factor(NPY_DATETIMEUNIT.NPY_FR_s, reso)
# Resolution enum value -> datetime attribute name (see Resolution.attrname).
cdef dict _reso_str_map = {
    Resolution.RESO_NS.value: "nanosecond",
    Resolution.RESO_US.value: "microsecond",
    Resolution.RESO_MS.value: "millisecond",
    Resolution.RESO_SEC.value: "second",
    Resolution.RESO_MIN.value: "minute",
    Resolution.RESO_HR.value: "hour",
    Resolution.RESO_DAY.value: "day",
    Resolution.RESO_MTH.value: "month",
    Resolution.RESO_QTR.value: "quarter",
    Resolution.RESO_YR.value: "year",
}

# Inverse: attribute name -> Resolution enum value.
cdef dict _str_reso_map = {v: k for k, v in _reso_str_map.items()}

# NPY_DATETIMEUNIT -> datetime attribute name (sub-daily units included).
cdef dict npy_unit_to_attrname = {
    NPY_DATETIMEUNIT.NPY_FR_Y: "year",
    NPY_DATETIMEUNIT.NPY_FR_M: "month",
    NPY_DATETIMEUNIT.NPY_FR_D: "day",
    NPY_DATETIMEUNIT.NPY_FR_h: "hour",
    NPY_DATETIMEUNIT.NPY_FR_m: "minute",
    NPY_DATETIMEUNIT.NPY_FR_s: "second",
    NPY_DATETIMEUNIT.NPY_FR_ms: "millisecond",
    NPY_DATETIMEUNIT.NPY_FR_us: "microsecond",
    NPY_DATETIMEUNIT.NPY_FR_ns: "nanosecond",
}
# Inverse: attribute name -> NPY_DATETIMEUNIT.
cdef dict attrname_to_npy_unit = {v: k for k, v in npy_unit_to_attrname.items()}

View File

@@ -0,0 +1,62 @@
import numpy as np
from pandas._typing import npt
# Stubs for the Cython accessors in fields.pyx.
def build_field_sarray(
    dtindex: npt.NDArray[np.int64],  # const int64_t[:]
    reso: int,  # NPY_DATETIMEUNIT
) -> np.ndarray: ...
def month_position_check(fields, weekdays) -> str | None: ...
def get_date_name_field(
    dtindex: npt.NDArray[np.int64],  # const int64_t[:]
    field: str,
    locale: str | None = ...,
    reso: int = ...,  # NPY_DATETIMEUNIT
) -> npt.NDArray[np.object_]: ...
def get_start_end_field(
    dtindex: npt.NDArray[np.int64],
    field: str,
    freqstr: str | None = ...,
    month_kw: int = ...,
    reso: int = ...,  # NPY_DATETIMEUNIT
) -> npt.NDArray[np.bool_]: ...
def get_date_field(
    dtindex: npt.NDArray[np.int64],  # const int64_t[:]
    field: str,
    reso: int = ...,  # NPY_DATETIMEUNIT
) -> npt.NDArray[np.int32]: ...
def get_timedelta_field(
    tdindex: npt.NDArray[np.int64],  # const int64_t[:]
    field: str,
    reso: int = ...,  # NPY_DATETIMEUNIT
) -> npt.NDArray[np.int32]: ...
def get_timedelta_days(
    tdindex: npt.NDArray[np.int64],  # const int64_t[:]
    reso: int = ...,  # NPY_DATETIMEUNIT
) -> npt.NDArray[np.int64]: ...
def isleapyear_arr(
    years: np.ndarray,
) -> npt.NDArray[np.bool_]: ...
def build_isocalendar_sarray(
    dtindex: npt.NDArray[np.int64],  # const int64_t[:]
    reso: int,  # NPY_DATETIMEUNIT
) -> np.ndarray: ...
def _get_locale_names(name_type: str, locale: str | None = ...): ...
class RoundTo:
    # Stub for the rounding-mode enumeration class in fields.pyx.
    @property
    def MINUS_INFTY(self) -> int: ...
    @property
    def PLUS_INFTY(self) -> int: ...
    @property
    def NEAREST_HALF_EVEN(self) -> int: ...
    @property
    def NEAREST_HALF_PLUS_INFTY(self) -> int: ...
    @property
    def NEAREST_HALF_MINUS_INFTY(self) -> int: ...
def round_nsint64(
    values: npt.NDArray[np.int64],
    mode: RoundTo,
    nanos: int,
) -> npt.NDArray[np.int64]: ...

View File

@@ -0,0 +1,818 @@
"""
Functions for accessing attributes of Timestamp/datetime64/datetime-like
objects and arrays
"""
from locale import LC_TIME
from _strptime import LocaleTime
cimport cython
from cython cimport Py_ssize_t
import numpy as np
cimport numpy as cnp
from numpy cimport (
int8_t,
int32_t,
int64_t,
ndarray,
uint32_t,
)
cnp.import_array()
from pandas._config.localization import set_locale
from pandas._libs.tslibs.ccalendar import (
DAYS_FULL,
MONTHS_FULL,
)
from pandas._libs.tslibs.ccalendar cimport (
dayofweek,
get_day_of_year,
get_days_in_month,
get_firstbday,
get_iso_calendar,
get_lastbday,
get_week_of_year,
iso_calendar_t,
)
from pandas._libs.tslibs.nattype cimport NPY_NAT
from pandas._libs.tslibs.np_datetime cimport (
NPY_DATETIMEUNIT,
NPY_FR_ns,
import_pandas_datetime,
npy_datetimestruct,
pandas_datetime_to_datetimestruct,
pandas_timedelta_to_timedeltastruct,
pandas_timedeltastruct,
)
import_pandas_datetime()
@cython.wraparound(False)
@cython.boundscheck(False)
def build_field_sarray(const int64_t[:] dtindex, NPY_DATETIMEUNIT reso):
    """
    Datetime as int64 representation to a structured array of fields

    Returns a structured ndarray with int32 fields Y/M/D/h/m/s/u for every
    entry of ``dtindex`` (interpreted in resolution ``reso``).
    """
    cdef:
        Py_ssize_t i, count = len(dtindex)
        npy_datetimestruct dts
        ndarray[int32_t] years, months, days, hours, minutes, seconds, mus

    sa_dtype = [
        ("Y", "i4"),  # year
        ("M", "i4"),  # month
        ("D", "i4"),  # day
        ("h", "i4"),  # hour
        ("m", "i4"),  # min
        ("s", "i4"),  # second
        ("u", "i4"),  # microsecond
    ]

    out = np.empty(count, dtype=sa_dtype)

    # Field views share memory with `out`, so writing to them fills `out`.
    years = out["Y"]
    months = out["M"]
    days = out["D"]
    hours = out["h"]
    minutes = out["m"]
    seconds = out["s"]
    mus = out["u"]

    for i in range(count):
        pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
        years[i] = dts.year
        months[i] = dts.month
        days[i] = dts.day
        hours[i] = dts.hour
        minutes[i] = dts.min
        seconds[i] = dts.sec
        mus[i] = dts.us

    return out
def month_position_check(fields, weekdays) -> str | None:
    """
    Classify dates as calendar/business month starts or ends.

    ``fields`` is a structured array with "Y"/"M"/"D" int32 fields (as built
    by build_field_sarray); ``weekdays`` gives the weekday of each date
    (0 == Monday, per the comparisons below).  Returns "ce"/"be"/"cs"/"bs"
    when every date is a calendar-end / business-end / calendar-start /
    business-start respectively, else None.
    """
    cdef:
        int32_t daysinmonth, y, m, d
        bint calendar_end = True
        bint business_end = True
        bint calendar_start = True
        bint business_start = True
        bint cal
        int32_t[:] years = fields["Y"]
        int32_t[:] months = fields["M"]
        int32_t[:] days = fields["D"]

    for y, m, d, wd in zip(years, months, days, weekdays):
        if calendar_start:
            calendar_start &= d == 1
        if business_start:
            # First of month, or Monday within the first three days.
            business_start &= d == 1 or (d <= 3 and wd == 0)

        if calendar_end or business_end:
            daysinmonth = get_days_in_month(y, m)
            cal = d == daysinmonth
            if calendar_end:
                calendar_end &= cal
            if business_end:
                # Last day, or Friday within the last three days.
                business_end &= cal or (daysinmonth - d < 3 and wd == 4)
        elif not calendar_start and not business_start:
            # All flags are already False; no classification possible.
            break

    if calendar_end:
        return "ce"
    elif business_end:
        return "be"
    elif calendar_start:
        return "cs"
    elif business_start:
        return "bs"
    else:
        return None
@cython.wraparound(False)
@cython.boundscheck(False)
def get_date_name_field(
    const int64_t[:] dtindex,
    str field,
    object locale=None,
    NPY_DATETIMEUNIT reso=NPY_FR_ns,
):
    """
    Given a int64-based datetime index, return array of strings of date
    name based on requested field (e.g. day_name)

    Supported fields: "day_name", "month_name"; anything else raises
    ValueError.  NaT entries map to np.nan.  When ``locale`` is given,
    names come from the locale's LC_TIME data instead of the defaults.
    """
    cdef:
        Py_ssize_t i
        cnp.npy_intp count = dtindex.shape[0]
        ndarray[object] out, names
        npy_datetimestruct dts
        int dow

    out = cnp.PyArray_EMPTY(1, &count, cnp.NPY_OBJECT, 0)

    if field == "day_name":
        if locale is None:
            names = np.array(DAYS_FULL, dtype=np.object_)
        else:
            names = np.array(_get_locale_names("f_weekday", locale),
                             dtype=np.object_)
        for i in range(count):
            if dtindex[i] == NPY_NAT:
                out[i] = np.nan
                continue

            pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
            dow = dayofweek(dts.year, dts.month, dts.day)
            out[i] = names[dow].capitalize()

    elif field == "month_name":
        if locale is None:
            names = np.array(MONTHS_FULL, dtype=np.object_)
        else:
            names = np.array(_get_locale_names("f_month", locale),
                             dtype=np.object_)
        for i in range(count):
            if dtindex[i] == NPY_NAT:
                out[i] = np.nan
                continue

            pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
            # MONTHS_FULL is 1-indexed by month number (presumably has a
            # placeholder at index 0 — defined in ccalendar; TODO confirm).
            out[i] = names[dts.month].capitalize()

    else:
        raise ValueError(f"Field {field} not supported")

    return out
cdef bint _is_on_month(int month, int compare_month, int modby) noexcept nogil:
    """
    Analogous to DateOffset.is_on_offset checking for the month part of a date.

    modby == 1 -> monthly (always on); modby == 3 -> quarterly (every third
    month from compare_month); otherwise yearly (exact month match).
    """
    if modby == 1:
        return True
    elif modby == 3:
        return (month - compare_month) % 3 == 0
    else:
        return month == compare_month
@cython.wraparound(False)
@cython.boundscheck(False)
def get_start_end_field(
    const int64_t[:] dtindex,
    str field,
    str freqstr=None,
    int month_kw=12,
    NPY_DATETIMEUNIT reso=NPY_FR_ns,
):
    """
    Given an int64-based datetime index return array of indicators
    of whether timestamps are at the start/end of the month/quarter/year
    (defined by frequency).

    Parameters
    ----------
    dtindex : ndarray[int64]
    field : str
        One of is_month_start/end, is_quarter_start/end, is_year_start/end.
    freqstr : str or None, default None
    month_kw : int, default 12
        Anchor month of the frequency (year/quarter start or end month).
    reso : NPY_DATETIMEUNIT, default NPY_FR_ns

    Returns
    -------
    ndarray[bool]
        NaT entries are False.
    """
    cdef:
        Py_ssize_t i
        int count = dtindex.shape[0]
        bint is_business = 0
        int end_month = 12
        int start_month = 1
        ndarray[int8_t] out
        npy_datetimestruct dts
        int compare_month, modby

    out = np.zeros(count, dtype="int8")

    if freqstr:
        if freqstr == "C":
            raise ValueError(f"Custom business days is not supported by {field}")
        is_business = freqstr[0] == "B"

        # YearBegin(), BYearBegin() use month = starting month of year.
        # QuarterBegin(), BQuarterBegin() use startingMonth = starting
        # month of year. Other offsets use month, startingMonth as ending
        # month of year.

        if (freqstr[0:2] in ["MS", "QS", "AS"]) or (
                freqstr[1:3] in ["MS", "QS", "AS"]):
            # "start"-anchored offsets: month_kw is the starting month.
            end_month = 12 if month_kw == 1 else month_kw - 1
            start_month = month_kw
        else:
            # "end"-anchored offsets: month_kw is the ending month.
            end_month = month_kw
            start_month = (end_month % 12) + 1

    else:
        end_month = 12
        start_month = 1

    compare_month = start_month if "start" in field else end_month
    if "month" in field:
        modby = 1
    elif "quarter" in field:
        modby = 3
    else:
        modby = 12

    if field in ["is_month_start", "is_quarter_start", "is_year_start"]:
        if is_business:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = 0
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)

                # Business frequencies: compare to first business day.
                if _is_on_month(dts.month, compare_month, modby) and (
                        dts.day == get_firstbday(dts.year, dts.month)):
                    out[i] = 1

        else:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = 0
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)

                if _is_on_month(dts.month, compare_month, modby) and dts.day == 1:
                    out[i] = 1

    elif field in ["is_month_end", "is_quarter_end", "is_year_end"]:
        if is_business:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = 0
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)

                if _is_on_month(dts.month, compare_month, modby) and (
                        dts.day == get_lastbday(dts.year, dts.month)):
                    out[i] = 1

        else:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = 0
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)

                if _is_on_month(dts.month, compare_month, modby) and (
                        dts.day == get_days_in_month(dts.year, dts.month)):
                    out[i] = 1

    else:
        raise ValueError(f"Field {field} not supported")

    return out.view(bool)
@cython.wraparound(False)
@cython.boundscheck(False)
def get_date_field(
    const int64_t[:] dtindex,
    str field,
    NPY_DATETIMEUNIT reso=NPY_FR_ns,
):
    """
    Given a int64-based datetime index, extract the year, month, etc.,
    field and return an array of these values.

    Supported fields: "Y", "M", "D", "h", "m", "s", "us", "ns", "doy"
    (day of year), "dow" (day of week), "woy" (week of year), "q" (quarter),
    "dim" (days in month), "is_leap_year".  NaT entries map to -1.
    """
    cdef:
        Py_ssize_t i, count = len(dtindex)
        ndarray[int32_t] out
        npy_datetimestruct dts

    out = np.empty(count, dtype="i4")

    if field == "Y":
        with nogil:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = -1
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
                out[i] = dts.year
        return out

    elif field == "M":
        with nogil:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = -1
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
                out[i] = dts.month
        return out

    elif field == "D":
        with nogil:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = -1
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
                out[i] = dts.day
        return out

    elif field == "h":
        with nogil:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = -1
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
                out[i] = dts.hour
                # TODO: can we de-dup with period.pyx <accessor>s?
        return out

    elif field == "m":
        with nogil:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = -1
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
                out[i] = dts.min
        return out

    elif field == "s":
        with nogil:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = -1
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
                out[i] = dts.sec
        return out

    elif field == "us":
        with nogil:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = -1
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
                out[i] = dts.us
        return out

    elif field == "ns":
        with nogil:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = -1
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
                # dts.ps holds picoseconds; divide to get the ns component.
                out[i] = dts.ps // 1000
        return out

    elif field == "doy":
        with nogil:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = -1
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
                out[i] = get_day_of_year(dts.year, dts.month, dts.day)
        return out

    elif field == "dow":
        with nogil:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = -1
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
                out[i] = dayofweek(dts.year, dts.month, dts.day)
        return out

    elif field == "woy":
        with nogil:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = -1
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
                out[i] = get_week_of_year(dts.year, dts.month, dts.day)
        return out

    elif field == "q":
        with nogil:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = -1
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
                out[i] = dts.month
                # Map month 1-12 to quarter 1-4.
                out[i] = ((out[i] - 1) // 3) + 1
        return out

    elif field == "dim":
        with nogil:
            for i in range(count):
                if dtindex[i] == NPY_NAT:
                    out[i] = -1
                    continue

                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
                out[i] = get_days_in_month(dts.year, dts.month)
        return out
    elif field == "is_leap_year":
        # Reuses the "Y" extraction above; NaT years (-1) evaluate as
        # non-leap in isleapyear_arr.
        return isleapyear_arr(get_date_field(dtindex, "Y", reso=reso))

    raise ValueError(f"Field {field} not supported")
@cython.wraparound(False)
@cython.boundscheck(False)
def get_timedelta_field(
    const int64_t[:] tdindex,
    str field,
    NPY_DATETIMEUNIT reso=NPY_FR_ns,
):
    """
    Given a int64-based timedelta index, extract the days, hrs, sec.,
    field and return an array of these values.

    Supported fields: "seconds", "microseconds", "nanoseconds"; anything
    else raises ValueError.  NaT entries map to -1.
    """
    cdef:
        Py_ssize_t i, count = len(tdindex)
        ndarray[int32_t] out
        pandas_timedeltastruct tds

    out = np.empty(count, dtype="i4")

    if field == "seconds":
        with nogil:
            for i in range(count):
                if tdindex[i] == NPY_NAT:
                    out[i] = -1
                    continue

                pandas_timedelta_to_timedeltastruct(tdindex[i], reso, &tds)
                out[i] = tds.seconds
        return out

    elif field == "microseconds":
        with nogil:
            for i in range(count):
                if tdindex[i] == NPY_NAT:
                    out[i] = -1
                    continue

                pandas_timedelta_to_timedeltastruct(tdindex[i], reso, &tds)
                out[i] = tds.microseconds
        return out

    elif field == "nanoseconds":
        with nogil:
            for i in range(count):
                if tdindex[i] == NPY_NAT:
                    out[i] = -1
                    continue

                pandas_timedelta_to_timedeltastruct(tdindex[i], reso, &tds)
                out[i] = tds.nanoseconds
        return out

    raise ValueError(f"Field {field} not supported")
@cython.wraparound(False)
@cython.boundscheck(False)
def get_timedelta_days(
    const int64_t[:] tdindex,
    NPY_DATETIMEUNIT reso=NPY_FR_ns,
):
    """
    Given a int64-based timedelta index, extract the days,
    field and return an array of these values.

    Returns int64 (days can exceed int32 range); NaT entries map to -1.
    """
    cdef:
        Py_ssize_t i, count = len(tdindex)
        ndarray[int64_t] out
        pandas_timedeltastruct tds

    out = np.empty(count, dtype="i8")

    with nogil:
        for i in range(count):
            if tdindex[i] == NPY_NAT:
                out[i] = -1
                continue

            pandas_timedelta_to_timedeltastruct(tdindex[i], reso, &tds)
            out[i] = tds.days
    return out
cpdef isleapyear_arr(ndarray years):
    """
    Vectorized version of isleapyear; NaT evaluates as False.

    Applies the Gregorian rule: a year is a leap year when it is divisible
    by 400, or divisible by 4 but not by 100.
    """
    cdef:
        ndarray[int8_t] flags

    flags = np.zeros(len(years), dtype="int8")
    leap_mask = np.logical_or(
        years % 400 == 0,
        np.logical_and(years % 4 == 0, years % 100 > 0),
    )
    flags[leap_mask] = 1
    return flags.view(bool)
@cython.wraparound(False)
@cython.boundscheck(False)
def build_isocalendar_sarray(const int64_t[:] dtindex, NPY_DATETIMEUNIT reso):
    """
    Given a int64-based datetime array, return the ISO 8601 year, week, and day
    as a structured array.

    NaT entries produce (0, 0, 0).
    """
    cdef:
        Py_ssize_t i, count = len(dtindex)
        npy_datetimestruct dts
        ndarray[uint32_t] iso_years, iso_weeks, days
        iso_calendar_t ret_val

    sa_dtype = [
        ("year", "u4"),
        ("week", "u4"),
        ("day", "u4"),
    ]

    out = np.empty(count, dtype=sa_dtype)

    # Field views share memory with `out`.
    iso_years = out["year"]
    iso_weeks = out["week"]
    days = out["day"]

    with nogil:
        for i in range(count):
            if dtindex[i] == NPY_NAT:
                ret_val = 0, 0, 0
            else:
                pandas_datetime_to_datetimestruct(dtindex[i], reso, &dts)
                ret_val = get_iso_calendar(dts.year, dts.month, dts.day)

            iso_years[i] = ret_val[0]
            iso_weeks[i] = ret_val[1]
            days[i] = ret_val[2]
    return out
def _get_locale_names(name_type: str, locale: object = None):
    """
    Returns an array of localized day or month names.

    Parameters
    ----------
    name_type : str
        Attribute of LocaleTime() in which to return localized names.
        (e.g. "f_weekday" or "f_month").
    locale : str

    Returns
    -------
    list of locale names
    """
    # Temporarily switch LC_TIME so LocaleTime() picks up the target locale.
    with set_locale(locale, LC_TIME):
        return getattr(LocaleTime(), name_type)
# ---------------------------------------------------------------------
# Rounding
class RoundTo:
    """
    enumeration defining the available rounding modes

    Attributes
    ----------
    MINUS_INFTY
        round towards -∞, or floor [2]_
    PLUS_INFTY
        round towards +∞, or ceil [3]_
    NEAREST_HALF_EVEN
        round to nearest, tie-break half to even [6]_
    NEAREST_HALF_MINUS_INFTY
        round to nearest, tie-break half to -∞ [5]_
    NEAREST_HALF_PLUS_INFTY
        round to nearest, tie-break half to +∞ [4]_

    References
    ----------
    .. [1] "Rounding - Wikipedia"
           https://en.wikipedia.org/wiki/Rounding
    .. [2] "Rounding down"
           https://en.wikipedia.org/wiki/Rounding#Rounding_down
    .. [3] "Rounding up"
           https://en.wikipedia.org/wiki/Rounding#Rounding_up
    .. [4] "Round half up"
           https://en.wikipedia.org/wiki/Rounding#Round_half_up
    .. [5] "Round half down"
           https://en.wikipedia.org/wiki/Rounding#Round_half_down
    .. [6] "Round half to even"
           https://en.wikipedia.org/wiki/Rounding#Round_half_to_even
    """
    # NOTE(review): these are properties, not class attributes, so
    # `RoundTo.MINUS_INFTY` (accessed on the class) yields the property
    # object itself; round_nsint64 compares `mode` against exactly these
    # class-level accesses, so the comparison presumably relies on object
    # identity — confirm before refactoring to plain constants.
    @property
    def MINUS_INFTY(self) -> int:
        return 0

    @property
    def PLUS_INFTY(self) -> int:
        return 1

    @property
    def NEAREST_HALF_EVEN(self) -> int:
        return 2

    @property
    def NEAREST_HALF_PLUS_INFTY(self) -> int:
        return 3

    @property
    def NEAREST_HALF_MINUS_INFTY(self) -> int:
        return 4
cdef ndarray[int64_t] _floor_int64(const int64_t[:] values, int64_t unit):
    # Floor each i8 value to the nearest lower multiple of ``unit``;
    # NPY_NAT entries pass through unchanged.
    cdef:
        Py_ssize_t i, n = len(values)
        ndarray[int64_t] result = np.empty(n, dtype="i8")
        int64_t res, value

    with cython.overflowcheck(True):
        for i in range(n):
            value = values[i]
            if value == NPY_NAT:
                res = NPY_NAT
            else:
                # assumes Python modulo semantics (no cdivision directive in
                # effect), so this rounds towards -inf for negative values
                # — TODO confirm file-level compiler directives
                res = value - value % unit
            result[i] = res

    return result
cdef ndarray[int64_t] _ceil_int64(const int64_t[:] values, int64_t unit):
    # Ceil each i8 value to the nearest higher multiple of ``unit``;
    # values already on a multiple are unchanged, NPY_NAT passes through.
    cdef:
        Py_ssize_t i, n = len(values)
        ndarray[int64_t] result = np.empty(n, dtype="i8")
        int64_t res, value, remainder

    # overflowcheck guards the ``value + (unit - remainder)`` addition
    # near the top of the int64 range.
    with cython.overflowcheck(True):
        for i in range(n):
            value = values[i]
            if value == NPY_NAT:
                res = NPY_NAT
            else:
                remainder = value % unit
                if remainder == 0:
                    res = value
                else:
                    res = value + (unit - remainder)
            result[i] = res

    return result
cdef ndarray[int64_t] _rounddown_int64(values, int64_t unit):
    # Round-half-down ("round to nearest, ties towards -inf"):
    # shift down by half a unit, then ceil.
    # NOTE(review): the shift is applied before _ceil_int64's NPY_NAT check,
    # so NAT entries are shifted and no longer detected as NAT — presumably
    # callers handle NaT beforehand; verify.
    return _ceil_int64(values - unit // 2, unit)
cdef ndarray[int64_t] _roundup_int64(values, int64_t unit):
    # Round-half-up ("round to nearest, ties towards +inf"):
    # shift up by half a unit, then floor.
    # NOTE(review): the shift is applied before _floor_int64's NPY_NAT check,
    # so NAT entries are shifted and no longer detected as NAT — presumably
    # callers handle NaT beforehand; verify.
    return _floor_int64(values + unit // 2, unit)
cdef ndarray[int64_t] _round_nearest_int64(const int64_t[:] values, int64_t unit):
    # Round-half-even ("banker's rounding") of i8 values to multiples of
    # ``unit``; NPY_NAT entries pass through unchanged.
    cdef:
        Py_ssize_t i, n = len(values)
        ndarray[int64_t] result = np.empty(n, dtype="i8")
        int64_t res, value, half, remainder, quotient

    half = unit // 2

    with cython.overflowcheck(True):
        for i in range(n):
            value = values[i]
            if value == NPY_NAT:
                res = NPY_NAT
            else:
                quotient, remainder = divmod(value, unit)
                if remainder > half:
                    # closer to the next multiple -> round up
                    res = value + (unit - remainder)
                elif remainder == half and quotient % 2:
                    # exactly halfway with an odd quotient -> round up so
                    # the result lands on an even multiple
                    res = value + (unit - remainder)
                else:
                    # closer to the lower multiple (or halfway with an even
                    # quotient) -> round down
                    res = value - remainder
            result[i] = res

    return result
def round_nsint64(values: np.ndarray, mode: RoundTo, nanos: int) -> np.ndarray:
    """
    Applies rounding mode at given frequency

    Parameters
    ----------
    values : np.ndarray[int64_t]
    mode : instance of `RoundTo` enumeration
    nanos : np.int64
        Freq to round to, expressed in nanoseconds

    Returns
    -------
    np.ndarray[int64_t]

    Raises
    ------
    ValueError
        If ``mode`` is not one of the ``RoundTo`` rounding modes.
    """
    cdef:
        int64_t unit = nanos

    if mode == RoundTo.MINUS_INFTY:
        return _floor_int64(values, unit)
    elif mode == RoundTo.PLUS_INFTY:
        return _ceil_int64(values, unit)
    elif mode == RoundTo.NEAREST_HALF_MINUS_INFTY:
        return _rounddown_int64(values, unit)
    elif mode == RoundTo.NEAREST_HALF_PLUS_INFTY:
        return _roundup_int64(values, unit)
    elif mode == RoundTo.NEAREST_HALF_EVEN:
        # for an odd unit there is no need for a tie-break: no value can
        # sit exactly halfway between two multiples
        if unit % 2:
            return _rounddown_int64(values, unit)
        return _round_nearest_int64(values, unit)

    # if/elif above should catch all rounding modes defined in enum 'RoundTo':
    # if flow of control arrives here, it is a bug
    raise ValueError("round_nsint64 called with an unrecognized rounding mode")

View File

@@ -0,0 +1,36 @@
# Cython sources for the pandas._libs.tslibs extension modules.
tslibs_sources = {
    # Dict of extension name -> dict of {sources, include_dirs, and deps}
    # numpy include dir is implicitly included
    'base': {'sources': ['base.pyx']},
    'ccalendar': {'sources': ['ccalendar.pyx']},
    'dtypes': {'sources': ['dtypes.pyx']},
    'conversion': {'sources': ['conversion.pyx']},
    'fields': {'sources': ['fields.pyx']},
    'nattype': {'sources': ['nattype.pyx']},
    'np_datetime': {'sources': ['np_datetime.pyx']},
    'offsets': {'sources': ['offsets.pyx']},
    # parsing additionally links the shared C tokenizer
    'parsing': {'sources': ['parsing.pyx', '../src/parser/tokenizer.c']},
    'period': {'sources': ['period.pyx']},
    'strptime': {'sources': ['strptime.pyx']},
    'timedeltas': {'sources': ['timedeltas.pyx']},
    'timestamps': {'sources': ['timestamps.pyx']},
    'timezones': {'sources': ['timezones.pyx']},
    'tzconversion': {'sources': ['tzconversion.pyx']},
    'vectorized': {'sources': ['vectorized.pyx']},
}

# Build one extension module per entry; all are installed under
# pandas/_libs/tslibs.
foreach ext_name, ext_dict : tslibs_sources
    py.extension_module(
        ext_name,
        ext_dict.get('sources'),
        cython_args: ['--include-dir', meson.current_build_dir(), '-X always_allow_keywords=true'],
        include_directories: [inc_np, inc_pd],
        dependencies: ext_dict.get('deps', ''),
        subdir: 'pandas/_libs/tslibs',
        install: true
    )
endforeach

# Install the pure-Python package initializer alongside the extensions.
py.install_sources('__init__.py',
    pure: false,
    subdir: 'pandas/_libs/tslibs')

View File

@@ -0,0 +1,18 @@
from cpython.datetime cimport datetime
from numpy cimport int64_t

# Sentinel i8 value used throughout the library to represent NaT.
cdef int64_t NPY_NAT

# Set of strings recognized as NaT (exported to Python as ``nat_strings``).
cdef set c_nat_strings

cdef class _NaT(datetime):
    cdef readonly:
        # Underlying i8 value — presumably the NPY_NAT sentinel for the
        # NaT singleton; verify in nattype.pyx.
        int64_t _value

# The shared NaT singleton instance.
cdef _NaT c_NaT

# Fast NaT checks: generic null-or-NaT, np.datetime64("NaT"), and
# np.timedelta64("NaT") respectively.
cdef bint checknull_with_nat(object val)
cdef bint is_dt64nat(object val)
cdef bint is_td64nat(object val)

View File

@@ -0,0 +1,135 @@
from datetime import (
datetime,
timedelta,
tzinfo as _tzinfo,
)
import typing
import numpy as np
from pandas._libs.tslibs.period import Period
NaT: NaTType
iNaT: int
nat_strings: set[str]
_NaTComparisonTypes: typing.TypeAlias = (
datetime | timedelta | Period | np.datetime64 | np.timedelta64
)
class _NatComparison:
    # Callable descriptor type used for NaT's ordering dunders below;
    # it restricts the right-hand operand to datetime-like values.
    def __call__(self, other: _NaTComparisonTypes) -> bool: ...
class NaTType:
    """Stub for the type of the ``NaT`` ("not a time") singleton.

    Numeric field accessors are stubbed as ``float`` — presumably
    ``np.nan`` at runtime; verify against nattype.pyx.
    """

    # Raw i8 value backing NaT — presumably the NPY_NAT sentinel; verify.
    _value: np.int64
    @property
    def value(self) -> int: ...
    @property
    def asm8(self) -> np.datetime64: ...
    def to_datetime64(self) -> np.datetime64: ...
    def to_numpy(
        self, dtype: np.dtype | str | None = ..., copy: bool = ...
    ) -> np.datetime64 | np.timedelta64: ...
    # Calendar predicates (Timestamp API parity).
    @property
    def is_leap_year(self) -> bool: ...
    @property
    def is_month_start(self) -> bool: ...
    @property
    def is_quarter_start(self) -> bool: ...
    @property
    def is_year_start(self) -> bool: ...
    @property
    def is_month_end(self) -> bool: ...
    @property
    def is_quarter_end(self) -> bool: ...
    @property
    def is_year_end(self) -> bool: ...
    # Calendar field accessors (float-typed; see class docstring).
    @property
    def day_of_year(self) -> float: ...
    @property
    def dayofyear(self) -> float: ...
    @property
    def days_in_month(self) -> float: ...
    @property
    def daysinmonth(self) -> float: ...
    @property
    def day_of_week(self) -> float: ...
    @property
    def dayofweek(self) -> float: ...
    @property
    def week(self) -> float: ...
    @property
    def weekofyear(self) -> float: ...
    def day_name(self) -> float: ...
    def month_name(self) -> float: ...
    def weekday(self) -> float: ...
    def isoweekday(self) -> float: ...
    def total_seconds(self) -> float: ...
    # Constructors / conversions — each is stubbed to yield NaT itself.
    def today(self, *args, **kwargs) -> NaTType: ...
    def now(self, *args, **kwargs) -> NaTType: ...
    def to_pydatetime(self) -> NaTType: ...
    def date(self) -> NaTType: ...
    def round(self) -> NaTType: ...
    def floor(self) -> NaTType: ...
    def ceil(self) -> NaTType: ...
    # Timezone accessors and operations.
    @property
    def tzinfo(self) -> None: ...
    @property
    def tz(self) -> None: ...
    def tz_convert(self, tz: _tzinfo | str | None) -> NaTType: ...
    def tz_localize(
        self,
        tz: _tzinfo | str | None,
        ambiguous: str = ...,
        nonexistent: str = ...,
    ) -> NaTType: ...
    def replace(
        self,
        year: int | None = ...,
        month: int | None = ...,
        day: int | None = ...,
        hour: int | None = ...,
        minute: int | None = ...,
        second: int | None = ...,
        microsecond: int | None = ...,
        nanosecond: int | None = ...,
        tzinfo: _tzinfo | None = ...,
        fold: int | None = ...,
    ) -> NaTType: ...
    # Datetime field accessors.
    @property
    def year(self) -> float: ...
    @property
    def quarter(self) -> float: ...
    @property
    def month(self) -> float: ...
    @property
    def day(self) -> float: ...
    @property
    def hour(self) -> float: ...
    @property
    def minute(self) -> float: ...
    @property
    def second(self) -> float: ...
    @property
    def millisecond(self) -> float: ...
    @property
    def microsecond(self) -> float: ...
    @property
    def nanosecond(self) -> float: ...
    # inject Timedelta properties
    @property
    def days(self) -> float: ...
    @property
    def microseconds(self) -> float: ...
    @property
    def nanoseconds(self) -> float: ...
    # inject Period properties
    @property
    def qyear(self) -> float: ...
    # Equality accepts any object; ordering comparisons are restricted to
    # datetime-like operands via the _NatComparison descriptors.
    def __eq__(self, other: object) -> bool: ...
    def __ne__(self, other: object) -> bool: ...
    __lt__: _NatComparison
    __le__: _NatComparison
    __gt__: _NatComparison
    __ge__: _NatComparison
    def as_unit(self, unit: str, round_ok: bool = ...) -> NaTType: ...

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,138 @@
cimport numpy as cnp
from cpython.datetime cimport (
date,
datetime,
)
from numpy cimport (
int32_t,
int64_t,
)
# TODO(cython3): most of these can be cimported directly from numpy
cdef extern from "numpy/ndarrayobject.h":
ctypedef int64_t npy_timedelta
ctypedef int64_t npy_datetime
cdef extern from "numpy/ndarraytypes.h":
ctypedef struct PyArray_DatetimeMetaData:
NPY_DATETIMEUNIT base
int64_t num
cdef extern from "numpy/arrayscalars.h":
ctypedef struct PyDatetimeScalarObject:
# PyObject_HEAD
npy_datetime obval
PyArray_DatetimeMetaData obmeta
ctypedef struct PyTimedeltaScalarObject:
# PyObject_HEAD
npy_timedelta obval
PyArray_DatetimeMetaData obmeta
cdef extern from "numpy/ndarraytypes.h":
ctypedef struct npy_datetimestruct:
int64_t year
int32_t month, day, hour, min, sec, us, ps, as
ctypedef enum NPY_DATETIMEUNIT:
NPY_FR_Y
NPY_FR_M
NPY_FR_W
NPY_FR_D
NPY_FR_B
NPY_FR_h
NPY_FR_m
NPY_FR_s
NPY_FR_ms
NPY_FR_us
NPY_FR_ns
NPY_FR_ps
NPY_FR_fs
NPY_FR_as
NPY_FR_GENERIC
    int64_t NPY_DATETIME_NAT  # elsewhere we call this NPY_NAT
cdef extern from "pandas/datetime/pd_datetime.h":
ctypedef struct pandas_timedeltastruct:
int64_t days
int32_t hrs, min, sec, ms, us, ns, seconds, microseconds, nanoseconds
void pandas_datetime_to_datetimestruct(npy_datetime val,
NPY_DATETIMEUNIT fr,
npy_datetimestruct *result) nogil
npy_datetime npy_datetimestruct_to_datetime(NPY_DATETIMEUNIT fr,
npy_datetimestruct *d) nogil
void pandas_timedelta_to_timedeltastruct(npy_timedelta val,
NPY_DATETIMEUNIT fr,
pandas_timedeltastruct *result
) nogil
void PandasDateTime_IMPORT()
ctypedef enum FormatRequirement:
PARTIAL_MATCH
EXACT_MATCH
INFER_FORMAT
# You must call this before using the PandasDateTime CAPI functions
cdef inline void import_pandas_datetime() noexcept:
PandasDateTime_IMPORT
cdef bint cmp_scalar(int64_t lhs, int64_t rhs, int op) except -1
cdef check_dts_bounds(npy_datetimestruct *dts, NPY_DATETIMEUNIT unit=?)
cdef int64_t pydatetime_to_dt64(
datetime val, npy_datetimestruct *dts, NPY_DATETIMEUNIT reso=?
)
cdef void pydatetime_to_dtstruct(datetime dt, npy_datetimestruct *dts) noexcept
cdef int64_t pydate_to_dt64(
date val, npy_datetimestruct *dts, NPY_DATETIMEUNIT reso=?
)
cdef void pydate_to_dtstruct(date val, npy_datetimestruct *dts) noexcept
cdef npy_datetime get_datetime64_value(object obj) noexcept nogil
cdef npy_timedelta get_timedelta64_value(object obj) noexcept nogil
cdef NPY_DATETIMEUNIT get_datetime64_unit(object obj) noexcept nogil
cdef int string_to_dts(
str val,
npy_datetimestruct* dts,
NPY_DATETIMEUNIT* out_bestunit,
int* out_local,
int* out_tzoffset,
bint want_exc,
format: str | None = *,
bint exact = *
) except? -1
cdef NPY_DATETIMEUNIT get_unit_from_dtype(cnp.dtype dtype)
cpdef cnp.ndarray astype_overflowsafe(
cnp.ndarray values, # ndarray[datetime64[anyunit]]
cnp.dtype dtype, # ndarray[datetime64[anyunit]]
bint copy=*,
bint round_ok=*,
bint is_coerce=*,
)
cdef int64_t get_conversion_factor(
NPY_DATETIMEUNIT from_unit,
NPY_DATETIMEUNIT to_unit,
) except? -1
cdef bint cmp_dtstructs(npy_datetimestruct* left, npy_datetimestruct* right, int op)
cdef get_implementation_bounds(
NPY_DATETIMEUNIT reso, npy_datetimestruct *lower, npy_datetimestruct *upper
)
cdef int64_t convert_reso(
int64_t value,
NPY_DATETIMEUNIT from_reso,
NPY_DATETIMEUNIT to_reso,
bint round_ok,
) except? -1

View File

@@ -0,0 +1,21 @@
import numpy as np

from pandas._typing import npt

# Raised when a datetime value falls outside the representable range.
class OutOfBoundsDatetime(ValueError): ...

# Raised when a timedelta value falls outside the representable range.
class OutOfBoundsTimedelta(ValueError): ...

# only exposed for testing
def py_get_unit_from_dtype(dtype: np.dtype): ...
def py_td64_to_tdstruct(td64: int, unit: int) -> dict: ...

# Overflow-safe datetime64/timedelta64 unit conversion.
def astype_overflowsafe(
    arr: np.ndarray,
    dtype: np.dtype,
    copy: bool = ...,
    round_ok: bool = ...,
    is_coerce: bool = ...,
) -> np.ndarray: ...

# True if a datetime64/timedelta64 dtype has no attached unit.
def is_unitless(dtype: np.dtype) -> bool: ...

# Overflow-safe elementwise comparison across mismatched resolutions.
def compare_mismatched_resolutions(
    left: np.ndarray, right: np.ndarray, op
) -> npt.NDArray[np.bool_]: ...

View File

@@ -0,0 +1,644 @@
cimport cython
from cpython.datetime cimport (
PyDateTime_CheckExact,
PyDateTime_DATE_GET_HOUR,
PyDateTime_DATE_GET_MICROSECOND,
PyDateTime_DATE_GET_MINUTE,
PyDateTime_DATE_GET_SECOND,
PyDateTime_GET_DAY,
PyDateTime_GET_MONTH,
PyDateTime_GET_YEAR,
import_datetime,
)
from cpython.object cimport (
Py_EQ,
Py_GE,
Py_GT,
Py_LE,
Py_LT,
Py_NE,
)
import_datetime()
PandasDateTime_IMPORT
import numpy as np
cimport numpy as cnp
cnp.import_array()
from numpy cimport (
int64_t,
ndarray,
uint8_t,
)
from pandas._libs.tslibs.util cimport get_c_string_buf_and_size
cdef extern from "pandas/datetime/pd_datetime.h":
int cmp_npy_datetimestruct(npy_datetimestruct *a,
npy_datetimestruct *b)
# AS, FS, PS versions exist but are not imported because they are not used.
npy_datetimestruct _NS_MIN_DTS, _NS_MAX_DTS
npy_datetimestruct _US_MIN_DTS, _US_MAX_DTS
npy_datetimestruct _MS_MIN_DTS, _MS_MAX_DTS
npy_datetimestruct _S_MIN_DTS, _S_MAX_DTS
npy_datetimestruct _M_MIN_DTS, _M_MAX_DTS
PyArray_DatetimeMetaData get_datetime_metadata_from_dtype(cnp.PyArray_Descr *dtype)
int parse_iso_8601_datetime(const char *str, int len, int want_exc,
npy_datetimestruct *out,
NPY_DATETIMEUNIT *out_bestunit,
int *out_local, int *out_tzoffset,
const char *format, int format_len,
FormatRequirement exact)
# ----------------------------------------------------------------------
# numpy object inspection
cdef npy_datetime get_datetime64_value(object obj) noexcept nogil:
"""
returns the int64 value underlying scalar numpy datetime64 object
Note that to interpret this as a datetime, the corresponding unit is
also needed. That can be found using `get_datetime64_unit`.
"""
return (<PyDatetimeScalarObject*>obj).obval
cdef npy_timedelta get_timedelta64_value(object obj) noexcept nogil:
"""
returns the int64 value underlying scalar numpy timedelta64 object
"""
return (<PyTimedeltaScalarObject*>obj).obval
cdef NPY_DATETIMEUNIT get_datetime64_unit(object obj) noexcept nogil:
"""
returns the unit part of the dtype for a numpy datetime64 object.
"""
return <NPY_DATETIMEUNIT>(<PyDatetimeScalarObject*>obj).obmeta.base
cdef NPY_DATETIMEUNIT get_unit_from_dtype(cnp.dtype dtype):
# NB: caller is responsible for ensuring this is *some* datetime64 or
# timedelta64 dtype, otherwise we can segfault
cdef:
cnp.PyArray_Descr* descr = <cnp.PyArray_Descr*>dtype
PyArray_DatetimeMetaData meta
meta = get_datetime_metadata_from_dtype(descr)
return meta.base
def py_get_unit_from_dtype(dtype):
    # for testing get_unit_from_dtype; adds 896 bytes to the .so file.
    # Returns the NPY_DATETIMEUNIT enum value for a datetime64/timedelta64
    # dtype; as with get_unit_from_dtype, the caller is responsible for
    # passing a datetime64/timedelta64 dtype.
    return get_unit_from_dtype(dtype)
def is_unitless(dtype: cnp.dtype) -> bool:
    """
    Check if a datetime64 or timedelta64 dtype has no attached unit.

    Raises
    ------
    ValueError
        If ``dtype`` is not datetime64 or timedelta64.
    """
    if dtype.type_num not in [cnp.NPY_DATETIME, cnp.NPY_TIMEDELTA]:
        raise ValueError("is_unitless dtype must be datetime64 or timedelta64")
    cdef:
        NPY_DATETIMEUNIT unit = get_unit_from_dtype(dtype)

    # A "generic" unit means the dtype was created without one,
    # e.g. np.dtype("M8").
    return unit == NPY_DATETIMEUNIT.NPY_FR_GENERIC
# ----------------------------------------------------------------------
# Comparison
cdef bint cmp_dtstructs(
npy_datetimestruct* left, npy_datetimestruct* right, int op
):
cdef:
int cmp_res
cmp_res = cmp_npy_datetimestruct(left, right)
if op == Py_EQ:
return cmp_res == 0
if op == Py_NE:
return cmp_res != 0
if op == Py_GT:
return cmp_res == 1
if op == Py_LT:
return cmp_res == -1
if op == Py_GE:
return cmp_res == 1 or cmp_res == 0
else:
# i.e. op == Py_LE
return cmp_res == -1 or cmp_res == 0
cdef bint cmp_scalar(int64_t lhs, int64_t rhs, int op) except -1:
"""
cmp_scalar is a more performant version of PyObject_RichCompare
typed for int64_t arguments.
"""
if op == Py_EQ:
return lhs == rhs
elif op == Py_NE:
return lhs != rhs
elif op == Py_LT:
return lhs < rhs
elif op == Py_LE:
return lhs <= rhs
elif op == Py_GT:
return lhs > rhs
elif op == Py_GE:
return lhs >= rhs
class OutOfBoundsDatetime(ValueError):
"""
Raised when the datetime is outside the range that can be represented.
Examples
--------
>>> pd.to_datetime("08335394550")
Traceback (most recent call last):
OutOfBoundsDatetime: Parsing "08335394550" to datetime overflows,
at position 0
"""
pass
class OutOfBoundsTimedelta(ValueError):
"""
Raised when encountering a timedelta value that cannot be represented.
Representation should be within a timedelta64[ns].
Examples
--------
>>> pd.date_range(start="1/1/1700", freq="B", periods=100000)
Traceback (most recent call last):
OutOfBoundsTimedelta: Cannot cast 139999 days 00:00:00
to unit='ns' without overflow.
"""
# Timedelta analogue to OutOfBoundsDatetime
pass
cdef get_implementation_bounds(
NPY_DATETIMEUNIT reso,
npy_datetimestruct *lower,
npy_datetimestruct *upper,
):
if reso == NPY_FR_ns:
upper[0] = _NS_MAX_DTS
lower[0] = _NS_MIN_DTS
elif reso == NPY_FR_us:
upper[0] = _US_MAX_DTS
lower[0] = _US_MIN_DTS
elif reso == NPY_FR_ms:
upper[0] = _MS_MAX_DTS
lower[0] = _MS_MIN_DTS
elif reso == NPY_FR_s:
upper[0] = _S_MAX_DTS
lower[0] = _S_MIN_DTS
elif reso == NPY_FR_m:
upper[0] = _M_MAX_DTS
lower[0] = _M_MIN_DTS
else:
raise NotImplementedError(reso)
cdef check_dts_bounds(npy_datetimestruct *dts, NPY_DATETIMEUNIT unit=NPY_FR_ns):
"""Raises OutOfBoundsDatetime if the given date is outside the range that
can be represented by nanosecond-resolution 64-bit integers."""
cdef:
bint error = False
npy_datetimestruct cmp_upper, cmp_lower
get_implementation_bounds(unit, &cmp_lower, &cmp_upper)
if cmp_npy_datetimestruct(dts, &cmp_lower) == -1:
error = True
elif cmp_npy_datetimestruct(dts, &cmp_upper) == 1:
error = True
if error:
fmt = (f"{dts.year}-{dts.month:02d}-{dts.day:02d} "
f"{dts.hour:02d}:{dts.min:02d}:{dts.sec:02d}")
# TODO: "nanosecond" in the message assumes NPY_FR_ns
raise OutOfBoundsDatetime(f"Out of bounds nanosecond timestamp: {fmt}")
# ----------------------------------------------------------------------
# Conversion
# just exposed for testing at the moment
def py_td64_to_tdstruct(int64_t td64, NPY_DATETIMEUNIT unit):
    # Python-accessible wrapper (exposed only for testing): decompose a raw
    # timedelta64 integer, interpreted in ``unit``, into its component fields.
    cdef:
        pandas_timedeltastruct tds
    pandas_timedelta_to_timedeltastruct(td64, unit, &tds)
    return tds  # <- returned as a dict to python
cdef void pydatetime_to_dtstruct(datetime dt, npy_datetimestruct *dts) noexcept:
if PyDateTime_CheckExact(dt):
dts.year = PyDateTime_GET_YEAR(dt)
else:
# We use dt.year instead of PyDateTime_GET_YEAR because with Timestamp
# we override year such that PyDateTime_GET_YEAR is incorrect.
dts.year = dt.year
dts.month = PyDateTime_GET_MONTH(dt)
dts.day = PyDateTime_GET_DAY(dt)
dts.hour = PyDateTime_DATE_GET_HOUR(dt)
dts.min = PyDateTime_DATE_GET_MINUTE(dt)
dts.sec = PyDateTime_DATE_GET_SECOND(dt)
dts.us = PyDateTime_DATE_GET_MICROSECOND(dt)
dts.ps = dts.as = 0
cdef int64_t pydatetime_to_dt64(datetime val,
npy_datetimestruct *dts,
NPY_DATETIMEUNIT reso=NPY_FR_ns):
"""
Note we are assuming that the datetime object is timezone-naive.
"""
pydatetime_to_dtstruct(val, dts)
return npy_datetimestruct_to_datetime(reso, dts)
cdef void pydate_to_dtstruct(date val, npy_datetimestruct *dts) noexcept:
dts.year = PyDateTime_GET_YEAR(val)
dts.month = PyDateTime_GET_MONTH(val)
dts.day = PyDateTime_GET_DAY(val)
dts.hour = dts.min = dts.sec = dts.us = 0
dts.ps = dts.as = 0
return
cdef int64_t pydate_to_dt64(
date val, npy_datetimestruct *dts, NPY_DATETIMEUNIT reso=NPY_FR_ns
):
pydate_to_dtstruct(val, dts)
return npy_datetimestruct_to_datetime(reso, dts)
cdef int string_to_dts(
str val,
npy_datetimestruct* dts,
NPY_DATETIMEUNIT* out_bestunit,
int* out_local,
int* out_tzoffset,
bint want_exc,
format: str | None=None,
bint exact=True,
) except? -1:
cdef:
Py_ssize_t length
const char* buf
Py_ssize_t format_length
const char* format_buf
FormatRequirement format_requirement
buf = get_c_string_buf_and_size(val, &length)
if format is None:
format_buf = b""
format_length = 0
format_requirement = INFER_FORMAT
else:
format_buf = get_c_string_buf_and_size(format, &format_length)
format_requirement = <FormatRequirement>exact
return parse_iso_8601_datetime(buf, length, want_exc,
dts, out_bestunit, out_local, out_tzoffset,
format_buf, format_length,
format_requirement)
cpdef ndarray astype_overflowsafe(
ndarray values,
cnp.dtype dtype,
bint copy=True,
bint round_ok=True,
bint is_coerce=False,
):
"""
Convert an ndarray with datetime64[X] to datetime64[Y]
or timedelta64[X] to timedelta64[Y],
raising on overflow.
"""
if values.descr.type_num == dtype.type_num == cnp.NPY_DATETIME:
# i.e. dtype.kind == "M"
dtype_name = "datetime64"
elif values.descr.type_num == dtype.type_num == cnp.NPY_TIMEDELTA:
# i.e. dtype.kind == "m"
dtype_name = "timedelta64"
else:
raise TypeError(
"astype_overflowsafe values.dtype and dtype must be either "
"both-datetime64 or both-timedelta64."
)
cdef:
NPY_DATETIMEUNIT from_unit = get_unit_from_dtype(values.dtype)
NPY_DATETIMEUNIT to_unit = get_unit_from_dtype(dtype)
if from_unit == NPY_DATETIMEUNIT.NPY_FR_GENERIC:
raise TypeError(f"{dtype_name} values must have a unit specified")
if to_unit == NPY_DATETIMEUNIT.NPY_FR_GENERIC:
# without raising explicitly here, we end up with a SystemError
# built-in function [...] returned a result with an error
raise ValueError(
f"{dtype_name} dtype must have a unit specified"
)
if from_unit == to_unit:
# Check this before allocating result for perf, might save some memory
if copy:
return values.copy()
return values
elif from_unit > to_unit:
if round_ok:
# e.g. ns -> us, so there is no risk of overflow, so we can use
# numpy's astype safely. Note there _is_ risk of truncation.
return values.astype(dtype)
else:
iresult2 = astype_round_check(values.view("i8"), from_unit, to_unit)
return iresult2.view(dtype)
if (<object>values).dtype.byteorder == ">":
        # GH#29684 we incorrectly get OutOfBoundsDatetime if we don't swap
values = values.astype(values.dtype.newbyteorder("<"))
cdef:
ndarray i8values = values.view("i8")
# equiv: result = np.empty((<object>values).shape, dtype="i8")
ndarray iresult = cnp.PyArray_EMPTY(
values.ndim, values.shape, cnp.NPY_INT64, 0
)
cnp.broadcast mi = cnp.PyArray_MultiIterNew2(iresult, i8values)
Py_ssize_t i, N = values.size
int64_t value, new_value
npy_datetimestruct dts
bint is_td = dtype.type_num == cnp.NPY_TIMEDELTA
for i in range(N):
# Analogous to: item = values[i]
value = (<int64_t*>cnp.PyArray_MultiIter_DATA(mi, 1))[0]
if value == NPY_DATETIME_NAT:
new_value = NPY_DATETIME_NAT
else:
pandas_datetime_to_datetimestruct(value, from_unit, &dts)
try:
check_dts_bounds(&dts, to_unit)
except OutOfBoundsDatetime as err:
if is_coerce:
new_value = NPY_DATETIME_NAT
elif is_td:
from_abbrev = np.datetime_data(values.dtype)[0]
np_val = np.timedelta64(value, from_abbrev)
msg = (
"Cannot convert {np_val} to {dtype} without overflow"
.format(np_val=str(np_val), dtype=str(dtype))
)
raise OutOfBoundsTimedelta(msg) from err
else:
raise
else:
new_value = npy_datetimestruct_to_datetime(to_unit, &dts)
# Analogous to: iresult[i] = new_value
(<int64_t*>cnp.PyArray_MultiIter_DATA(mi, 0))[0] = new_value
cnp.PyArray_MultiIter_NEXT(mi)
return iresult.view(dtype)
# TODO: try to upstream this fix to numpy
def compare_mismatched_resolutions(ndarray left, ndarray right, op):
    """
    Overflow-safe comparison of timedelta64/datetime64 with mismatched resolutions.

    >>> left = np.array([500], dtype="M8[Y]")
    >>> right = np.array([0], dtype="M8[ns]")
    >>> left < right  # <- wrong!
    array([ True])

    Parameters
    ----------
    left, right : np.ndarray
        Both datetime64 or both timedelta64, possibly with different units.
    op : callable
        One of the six ``operator``-module comparison functions.

    Returns
    -------
    np.ndarray[bool]
    """
    if left.dtype.kind != right.dtype.kind or left.dtype.kind not in "mM":
        raise ValueError("left and right must both be timedelta64 or both datetime64")
    cdef:
        int op_code = op_to_op_code(op)
        NPY_DATETIMEUNIT left_unit = get_unit_from_dtype(left.dtype)
        NPY_DATETIMEUNIT right_unit = get_unit_from_dtype(right.dtype)
        # equiv: result = np.empty((<object>left).shape, dtype="bool")
        ndarray result = cnp.PyArray_EMPTY(
            left.ndim, left.shape, cnp.NPY_BOOL, 0
        )
        ndarray lvalues = left.view("i8")
        ndarray rvalues = right.view("i8")
        cnp.broadcast mi = cnp.PyArray_MultiIterNew3(result, lvalues, rvalues)
        int64_t lval, rval
        bint res_value
        Py_ssize_t i, N = left.size
        npy_datetimestruct ldts, rdts
    for i in range(N):
        # Analogous to: lval = lvalues[i]
        lval = (<int64_t*>cnp.PyArray_MultiIter_DATA(mi, 1))[0]
        # Analogous to: rval = rvalues[i]
        rval = (<int64_t*>cnp.PyArray_MultiIter_DATA(mi, 2))[0]
        if lval == NPY_DATETIME_NAT or rval == NPY_DATETIME_NAT:
            # NaT compares unequal to everything, so only "!=" is True
            res_value = op_code == Py_NE
        else:
            # Widen both values to full datetimestructs and compare those,
            # sidestepping integer overflow entirely.
            pandas_datetime_to_datetimestruct(lval, left_unit, &ldts)
            pandas_datetime_to_datetimestruct(rval, right_unit, &rdts)
            res_value = cmp_dtstructs(&ldts, &rdts, op_code)
        # Analogous to: result[i] = res_value
        (<uint8_t*>cnp.PyArray_MultiIter_DATA(mi, 0))[0] = res_value
        cnp.PyArray_MultiIter_NEXT(mi)
    return result
import operator
cdef int op_to_op_code(op) except? -1:
    """
    Map an ``operator``-module comparison function to the corresponding
    CPython rich-comparison opcode (Py_EQ, Py_NE, Py_LE, Py_LT, Py_GE, Py_GT).

    Raises
    ------
    ValueError
        If ``op`` is not one of the six comparison operators.
    """
    # TODO: should exist somewhere?
    if op is operator.eq:
        return Py_EQ
    if op is operator.ne:
        return Py_NE
    if op is operator.le:
        return Py_LE
    if op is operator.lt:
        return Py_LT
    if op is operator.ge:
        return Py_GE
    if op is operator.gt:
        return Py_GT
    # Previously an unrecognized op fell off the end of the function and
    # silently returned 0, which equals Py_LT (i.e. any unknown callable
    # behaved like "<").  The ``except? -1`` declaration lets this
    # exception propagate to C callers; -1 is safe as the sentinel because
    # no rich-comparison opcode is negative.
    raise ValueError(f"Invalid comparison operator: {op}")
cdef ndarray astype_round_check(
ndarray i8values,
NPY_DATETIMEUNIT from_unit,
NPY_DATETIMEUNIT to_unit
):
# cases with from_unit > to_unit, e.g. ns->us, raise if the conversion
# involves truncation, e.g. 1500ns->1us
cdef:
Py_ssize_t i, N = i8values.size
# equiv: iresult = np.empty((<object>i8values).shape, dtype="i8")
ndarray iresult = cnp.PyArray_EMPTY(
i8values.ndim, i8values.shape, cnp.NPY_INT64, 0
)
cnp.broadcast mi = cnp.PyArray_MultiIterNew2(iresult, i8values)
        # Note the arguments to_unit, from_unit are swapped vs how they
# are passed when going to a higher-frequency reso.
int64_t mult = get_conversion_factor(to_unit, from_unit)
int64_t value, mod
for i in range(N):
# Analogous to: item = i8values[i]
value = (<int64_t*>cnp.PyArray_MultiIter_DATA(mi, 1))[0]
if value == NPY_DATETIME_NAT:
new_value = NPY_DATETIME_NAT
else:
new_value, mod = divmod(value, mult)
if mod != 0:
# TODO: avoid runtime import
from pandas._libs.tslibs.dtypes import npy_unit_to_abbrev
from_abbrev = npy_unit_to_abbrev(from_unit)
to_abbrev = npy_unit_to_abbrev(to_unit)
raise ValueError(
f"Cannot losslessly cast '{value} {from_abbrev}' to {to_abbrev}"
)
# Analogous to: iresult[i] = new_value
(<int64_t*>cnp.PyArray_MultiIter_DATA(mi, 0))[0] = new_value
cnp.PyArray_MultiIter_NEXT(mi)
return iresult
@cython.overflowcheck(True)
cdef int64_t get_conversion_factor(
    NPY_DATETIMEUNIT from_unit,
    NPY_DATETIMEUNIT to_unit
) except? -1:
    """
    Find the factor by which we need to multiply to convert from from_unit to to_unit.

    ``from_unit`` must be coarser than or equal to ``to_unit``; generic
    (unitless) resolutions are rejected, as are month/year origins.
    """
    if (
        from_unit == NPY_DATETIMEUNIT.NPY_FR_GENERIC
        or to_unit == NPY_DATETIMEUNIT.NPY_FR_GENERIC
    ):
        raise ValueError("unit-less resolutions are not supported")
    if from_unit > to_unit:
        # NB: larger enum value == finer resolution
        raise ValueError

    if from_unit == to_unit:
        return 1

    # Recurse one step down the unit ladder
    # (W -> D -> h -> m -> s -> ms -> us -> ns -> ps -> fs -> as);
    # the overflowcheck decorator guards the accumulated multiplication.
    if from_unit == NPY_DATETIMEUNIT.NPY_FR_W:
        return 7 * get_conversion_factor(NPY_DATETIMEUNIT.NPY_FR_D, to_unit)
    elif from_unit == NPY_DATETIMEUNIT.NPY_FR_D:
        return 24 * get_conversion_factor(NPY_DATETIMEUNIT.NPY_FR_h, to_unit)
    elif from_unit == NPY_DATETIMEUNIT.NPY_FR_h:
        return 60 * get_conversion_factor(NPY_DATETIMEUNIT.NPY_FR_m, to_unit)
    elif from_unit == NPY_DATETIMEUNIT.NPY_FR_m:
        return 60 * get_conversion_factor(NPY_DATETIMEUNIT.NPY_FR_s, to_unit)
    elif from_unit == NPY_DATETIMEUNIT.NPY_FR_s:
        return 1000 * get_conversion_factor(NPY_DATETIMEUNIT.NPY_FR_ms, to_unit)
    elif from_unit == NPY_DATETIMEUNIT.NPY_FR_ms:
        return 1000 * get_conversion_factor(NPY_DATETIMEUNIT.NPY_FR_us, to_unit)
    elif from_unit == NPY_DATETIMEUNIT.NPY_FR_us:
        return 1000 * get_conversion_factor(NPY_DATETIMEUNIT.NPY_FR_ns, to_unit)
    elif from_unit == NPY_DATETIMEUNIT.NPY_FR_ns:
        return 1000 * get_conversion_factor(NPY_DATETIMEUNIT.NPY_FR_ps, to_unit)
    elif from_unit == NPY_DATETIMEUNIT.NPY_FR_ps:
        return 1000 * get_conversion_factor(NPY_DATETIMEUNIT.NPY_FR_fs, to_unit)
    elif from_unit == NPY_DATETIMEUNIT.NPY_FR_fs:
        return 1000 * get_conversion_factor(NPY_DATETIMEUNIT.NPY_FR_as, to_unit)
    else:
        raise ValueError("Converting from M or Y units is not supported.")
cdef int64_t convert_reso(
    int64_t value,
    NPY_DATETIMEUNIT from_reso,
    NPY_DATETIMEUNIT to_reso,
    bint round_ok,
) except? -1:
    """
    Convert ``value`` from one datetime/timedelta resolution to another.

    Raises
    ------
    ValueError
        If converting to a coarser resolution loses precision and
        ``round_ok`` is False.
    """
    cdef:
        int64_t res_value, mult, div, mod

    if from_reso == to_reso:
        return value

    elif to_reso < from_reso:
        # e.g. ns -> us, no risk of overflow, but can be lossy rounding
        mult = get_conversion_factor(to_reso, from_reso)
        div, mod = divmod(value, mult)
        if mod > 0 and not round_ok:
            raise ValueError("Cannot losslessly convert units")

        # Note that when mod > 0, we follow np.timedelta64 in always
        #  rounding down.
        res_value = div

    elif (
        from_reso == NPY_FR_Y
        or from_reso == NPY_FR_M
        or to_reso == NPY_FR_Y
        or to_reso == NPY_FR_M
    ):
        # Converting by multiplying isn't _quite_ right bc the number of
        #  seconds in a month/year isn't fixed.
        res_value = _convert_reso_with_dtstruct(value, from_reso, to_reso)

    else:
        # e.g. ns -> us, risk of overflow, but no risk of lossy rounding
        mult = get_conversion_factor(from_reso, to_reso)
        with cython.overflowcheck(True):
            # Note: caller is responsible for re-raising as OutOfBoundsTimedelta
            res_value = value * mult

    return res_value
cdef int64_t _convert_reso_with_dtstruct(
    int64_t value,
    NPY_DATETIMEUNIT from_unit,
    NPY_DATETIMEUNIT to_unit,
) except? -1:
    # Convert via a full npy_datetimestruct round-trip; used by convert_reso
    # when either unit is month/year, where the conversion factor is not
    # constant.  Raises OutOfBoundsDatetime (via check_dts_bounds) if the
    # value does not fit in the target resolution.
    cdef:
        npy_datetimestruct dts

    pandas_datetime_to_datetimestruct(value, from_unit, &dts)
    check_dts_bounds(&dts, to_unit)
    return npy_datetimestruct_to_datetime(to_unit, &dts)

View File

@@ -0,0 +1,12 @@
from numpy cimport int64_t

# Convert a frequency string / timedelta / offset into a BaseOffset
# (see the overloads in offsets.pyi).
cpdef to_offset(object obj)

# Fast type checks for offset objects and Tick (fixed-frequency) offsets.
cdef bint is_offset_object(object obj)
cdef bint is_tick_object(object obj)

cdef class BaseOffset:
    cdef readonly:
        # Number of multiples of the base frequency.
        int64_t n
        # Whether to normalize result timestamps — presumably to midnight;
        # verify in offsets.pyx.
        bint normalize
        # Internal cache for derived attributes.
        dict _cache

View File

@@ -0,0 +1,283 @@
from datetime import (
datetime,
time,
timedelta,
)
from typing import (
Any,
Collection,
Literal,
TypeVar,
overload,
)
import numpy as np
from pandas._libs.tslibs.nattype import NaTType
from pandas._typing import (
OffsetCalendar,
Self,
npt,
)
from .timedeltas import Timedelta
_BaseOffsetT = TypeVar("_BaseOffsetT", bound=BaseOffset)
_DatetimeT = TypeVar("_DatetimeT", bound=datetime)
_TimedeltaT = TypeVar("_TimedeltaT", bound=timedelta)
_relativedelta_kwds: set[str]
prefix_mapping: dict[str, type]
class ApplyTypeError(TypeError): ...
class BaseOffset:
n: int
def __init__(self, n: int = ..., normalize: bool = ...) -> None: ...
def __eq__(self, other) -> bool: ...
def __ne__(self, other) -> bool: ...
def __hash__(self) -> int: ...
@property
def kwds(self) -> dict: ...
@property
def base(self) -> BaseOffset: ...
@overload
def __add__(self, other: npt.NDArray[np.object_]) -> npt.NDArray[np.object_]: ...
@overload
def __add__(self, other: BaseOffset) -> Self: ...
@overload
def __add__(self, other: _DatetimeT) -> _DatetimeT: ...
@overload
def __add__(self, other: _TimedeltaT) -> _TimedeltaT: ...
@overload
def __radd__(self, other: npt.NDArray[np.object_]) -> npt.NDArray[np.object_]: ...
@overload
def __radd__(self, other: BaseOffset) -> Self: ...
@overload
def __radd__(self, other: _DatetimeT) -> _DatetimeT: ...
@overload
def __radd__(self, other: _TimedeltaT) -> _TimedeltaT: ...
@overload
def __radd__(self, other: NaTType) -> NaTType: ...
def __sub__(self, other: BaseOffset) -> Self: ...
@overload
def __rsub__(self, other: npt.NDArray[np.object_]) -> npt.NDArray[np.object_]: ...
@overload
def __rsub__(self, other: BaseOffset): ...
@overload
def __rsub__(self, other: _DatetimeT) -> _DatetimeT: ...
@overload
def __rsub__(self, other: _TimedeltaT) -> _TimedeltaT: ...
@overload
def __mul__(self, other: np.ndarray) -> np.ndarray: ...
@overload
def __mul__(self, other: int): ...
@overload
def __rmul__(self, other: np.ndarray) -> np.ndarray: ...
@overload
def __rmul__(self, other: int) -> Self: ...
def __neg__(self) -> Self: ...
def copy(self) -> Self: ...
@property
def name(self) -> str: ...
@property
def rule_code(self) -> str: ...
@property
def freqstr(self) -> str: ...
def _apply(self, other): ...
def _apply_array(self, dtarr) -> None: ...
def rollback(self, dt: datetime) -> datetime: ...
def rollforward(self, dt: datetime) -> datetime: ...
def is_on_offset(self, dt: datetime) -> bool: ...
def __setstate__(self, state) -> None: ...
def __getstate__(self): ...
@property
def nanos(self) -> int: ...
def is_anchored(self) -> bool: ...
def _get_offset(name: str) -> BaseOffset: ...
class SingleConstructorOffset(BaseOffset):
@classmethod
def _from_name(cls, suffix: None = ...): ...
def __reduce__(self): ...
@overload
def to_offset(freq: None) -> None: ...
@overload
def to_offset(freq: _BaseOffsetT) -> _BaseOffsetT: ...
@overload
def to_offset(freq: timedelta | str) -> BaseOffset: ...
class Tick(SingleConstructorOffset):
_creso: int
_prefix: str
def __init__(self, n: int = ..., normalize: bool = ...) -> None: ...
@property
def delta(self) -> Timedelta: ...
@property
def nanos(self) -> int: ...
def delta_to_tick(delta: timedelta) -> Tick: ...
class Day(Tick): ...
class Hour(Tick): ...
class Minute(Tick): ...
class Second(Tick): ...
class Milli(Tick): ...
class Micro(Tick): ...
class Nano(Tick): ...
class RelativeDeltaOffset(BaseOffset):
def __init__(self, n: int = ..., normalize: bool = ..., **kwds: Any) -> None: ...
class BusinessMixin(SingleConstructorOffset):
def __init__(
self, n: int = ..., normalize: bool = ..., offset: timedelta = ...
) -> None: ...
class BusinessDay(BusinessMixin): ...
class BusinessHour(BusinessMixin):
def __init__(
self,
n: int = ...,
normalize: bool = ...,
start: str | time | Collection[str | time] = ...,
end: str | time | Collection[str | time] = ...,
offset: timedelta = ...,
) -> None: ...
class WeekOfMonthMixin(SingleConstructorOffset):
def __init__(
self, n: int = ..., normalize: bool = ..., weekday: int = ...
) -> None: ...
class YearOffset(SingleConstructorOffset):
def __init__(
self, n: int = ..., normalize: bool = ..., month: int | None = ...
) -> None: ...
class BYearEnd(YearOffset): ...
class BYearBegin(YearOffset): ...
class YearEnd(YearOffset): ...
class YearBegin(YearOffset): ...
class QuarterOffset(SingleConstructorOffset):
def __init__(
self, n: int = ..., normalize: bool = ..., startingMonth: int | None = ...
) -> None: ...
class BQuarterEnd(QuarterOffset): ...
class BQuarterBegin(QuarterOffset): ...
class QuarterEnd(QuarterOffset): ...
class QuarterBegin(QuarterOffset): ...
class MonthOffset(SingleConstructorOffset): ...
class MonthEnd(MonthOffset): ...
class MonthBegin(MonthOffset): ...
class BusinessMonthEnd(MonthOffset): ...
class BusinessMonthBegin(MonthOffset): ...
class SemiMonthOffset(SingleConstructorOffset):
def __init__(
self, n: int = ..., normalize: bool = ..., day_of_month: int | None = ...
) -> None: ...
class SemiMonthEnd(SemiMonthOffset): ...
class SemiMonthBegin(SemiMonthOffset): ...
class Week(SingleConstructorOffset):
def __init__(
self, n: int = ..., normalize: bool = ..., weekday: int | None = ...
) -> None: ...
class WeekOfMonth(WeekOfMonthMixin):
def __init__(
self, n: int = ..., normalize: bool = ..., week: int = ..., weekday: int = ...
) -> None: ...
class LastWeekOfMonth(WeekOfMonthMixin): ...
class FY5253Mixin(SingleConstructorOffset):
def __init__(
self,
n: int = ...,
normalize: bool = ...,
weekday: int = ...,
startingMonth: int = ...,
variation: Literal["nearest", "last"] = ...,
) -> None: ...
class FY5253(FY5253Mixin): ...
class FY5253Quarter(FY5253Mixin):
def __init__(
self,
n: int = ...,
normalize: bool = ...,
weekday: int = ...,
startingMonth: int = ...,
qtr_with_extra_week: int = ...,
variation: Literal["nearest", "last"] = ...,
) -> None: ...
class Easter(SingleConstructorOffset): ...
class _CustomBusinessMonth(BusinessMixin):
def __init__(
self,
n: int = ...,
normalize: bool = ...,
weekmask: str = ...,
holidays: list | None = ...,
calendar: OffsetCalendar | None = ...,
offset: timedelta = ...,
) -> None: ...
class CustomBusinessDay(BusinessDay):
def __init__(
self,
n: int = ...,
normalize: bool = ...,
weekmask: str = ...,
holidays: list | None = ...,
calendar: OffsetCalendar | None = ...,
offset: timedelta = ...,
) -> None: ...
class CustomBusinessHour(BusinessHour):
def __init__(
self,
n: int = ...,
normalize: bool = ...,
weekmask: str = ...,
holidays: list | None = ...,
calendar: OffsetCalendar | None = ...,
start: str | time | Collection[str | time] = ...,
end: str | time | Collection[str | time] = ...,
offset: timedelta = ...,
) -> None: ...
class CustomBusinessMonthEnd(_CustomBusinessMonth): ...
class CustomBusinessMonthBegin(_CustomBusinessMonth): ...
class OffsetMeta(type): ...
class DateOffset(RelativeDeltaOffset, metaclass=OffsetMeta): ...
BDay = BusinessDay
BMonthEnd = BusinessMonthEnd
BMonthBegin = BusinessMonthBegin
CBMonthEnd = CustomBusinessMonthEnd
CBMonthBegin = CustomBusinessMonthBegin
CDay = CustomBusinessDay
def roll_qtrday(
other: datetime, n: int, month: int, day_opt: str, modby: int
) -> int: ...
INVALID_FREQ_ERR_MSG: Literal["Invalid frequency: {0}"]
def shift_months(
dtindex: npt.NDArray[np.int64], months: int, day_opt: str | None = ...
) -> npt.NDArray[np.int64]: ...
_offset_map: dict[str, BaseOffset]

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,14 @@
from cpython.datetime cimport datetime
from pandas._libs.tslibs.np_datetime cimport NPY_DATETIMEUNIT
cpdef str get_rule_month(str source)
cpdef quarter_to_myear(int year, int quarter, str freq)
cdef datetime parse_datetime_string(
str date_string,
bint dayfirst,
bint yearfirst,
NPY_DATETIMEUNIT* out_bestunit
)

View File

@@ -0,0 +1,38 @@
from datetime import datetime
import numpy as np
from pandas._typing import npt
class DateParseError(ValueError): ...
def py_parse_datetime_string(
date_string: str,
dayfirst: bool = ...,
yearfirst: bool = ...,
) -> datetime: ...
def parse_datetime_string_with_reso(
date_string: str,
freq: str | None = ...,
dayfirst: bool | None = ...,
yearfirst: bool | None = ...,
) -> tuple[datetime, str]: ...
def _does_string_look_like_datetime(py_string: str) -> bool: ...
def quarter_to_myear(year: int, quarter: int, freq: str) -> tuple[int, int]: ...
def try_parse_dates(
values: npt.NDArray[np.object_], # object[:]
parser,
) -> npt.NDArray[np.object_]: ...
def try_parse_year_month_day(
years: npt.NDArray[np.object_], # object[:]
months: npt.NDArray[np.object_], # object[:]
days: npt.NDArray[np.object_], # object[:]
) -> npt.NDArray[np.object_]: ...
def guess_datetime_format(
dt_str,
dayfirst: bool | None = ...,
) -> str | None: ...
def concat_date_cols(
date_cols: tuple,
) -> npt.NDArray[np.object_]: ...
def get_rule_month(source: str) -> str: ...

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,7 @@
from numpy cimport int64_t
from .np_datetime cimport npy_datetimestruct
cdef bint is_period_object(object obj)
cdef int64_t get_period_ordinal(npy_datetimestruct *dts, int freq) noexcept nogil

View File

@@ -0,0 +1,135 @@
from datetime import timedelta
from typing import Literal
import numpy as np
from pandas._libs.tslibs.dtypes import PeriodDtypeBase
from pandas._libs.tslibs.nattype import NaTType
from pandas._libs.tslibs.offsets import BaseOffset
from pandas._libs.tslibs.timestamps import Timestamp
from pandas._typing import (
Frequency,
npt,
)
INVALID_FREQ_ERR_MSG: str
DIFFERENT_FREQ: str
class IncompatibleFrequency(ValueError): ...
def periodarr_to_dt64arr(
periodarr: npt.NDArray[np.int64], # const int64_t[:]
freq: int,
) -> npt.NDArray[np.int64]: ...
def period_asfreq_arr(
arr: npt.NDArray[np.int64],
freq1: int,
freq2: int,
end: bool,
) -> npt.NDArray[np.int64]: ...
def get_period_field_arr(
field: str,
arr: npt.NDArray[np.int64], # const int64_t[:]
freq: int,
) -> npt.NDArray[np.int64]: ...
def from_ordinals(
values: npt.NDArray[np.int64], # const int64_t[:]
freq: timedelta | BaseOffset | str,
) -> npt.NDArray[np.int64]: ...
def extract_ordinals(
values: npt.NDArray[np.object_],
freq: Frequency | int,
) -> npt.NDArray[np.int64]: ...
def extract_freq(
values: npt.NDArray[np.object_],
) -> BaseOffset: ...
def period_array_strftime(
values: npt.NDArray[np.int64],
dtype_code: int,
na_rep,
date_format: str | None,
) -> npt.NDArray[np.object_]: ...
# exposed for tests
def period_asfreq(ordinal: int, freq1: int, freq2: int, end: bool) -> int: ...
def period_ordinal(
y: int, m: int, d: int, h: int, min: int, s: int, us: int, ps: int, freq: int
) -> int: ...
def freq_to_dtype_code(freq: BaseOffset) -> int: ...
def validate_end_alias(how: str) -> Literal["E", "S"]: ...
class PeriodMixin:
@property
def end_time(self) -> Timestamp: ...
@property
def start_time(self) -> Timestamp: ...
def _require_matching_freq(self, other, base: bool = ...) -> None: ...
class Period(PeriodMixin):
ordinal: int # int64_t
freq: BaseOffset
_dtype: PeriodDtypeBase
# error: "__new__" must return a class instance (got "Union[Period, NaTType]")
def __new__( # type: ignore[misc]
cls,
value=...,
freq: int | str | BaseOffset | None = ...,
ordinal: int | None = ...,
year: int | None = ...,
month: int | None = ...,
quarter: int | None = ...,
day: int | None = ...,
hour: int | None = ...,
minute: int | None = ...,
second: int | None = ...,
) -> Period | NaTType: ...
@classmethod
def _maybe_convert_freq(cls, freq) -> BaseOffset: ...
@classmethod
def _from_ordinal(cls, ordinal: int, freq) -> Period: ...
@classmethod
def now(cls, freq: BaseOffset = ...) -> Period: ...
def strftime(self, fmt: str) -> str: ...
def to_timestamp(
self,
freq: str | BaseOffset | None = ...,
how: str = ...,
) -> Timestamp: ...
def asfreq(self, freq: str | BaseOffset, how: str = ...) -> Period: ...
@property
def freqstr(self) -> str: ...
@property
def is_leap_year(self) -> bool: ...
@property
def daysinmonth(self) -> int: ...
@property
def days_in_month(self) -> int: ...
@property
def qyear(self) -> int: ...
@property
def quarter(self) -> int: ...
@property
def day_of_year(self) -> int: ...
@property
def weekday(self) -> int: ...
@property
def day_of_week(self) -> int: ...
@property
def week(self) -> int: ...
@property
def weekofyear(self) -> int: ...
@property
def second(self) -> int: ...
@property
def minute(self) -> int: ...
@property
def hour(self) -> int: ...
@property
def day(self) -> int: ...
@property
def month(self) -> int: ...
@property
def year(self) -> int: ...
def __sub__(self, other) -> Period | BaseOffset: ...
def __add__(self, other) -> Period: ...

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,4 @@
from numpy cimport int64_t
cdef bint parse_today_now(str val, int64_t* iresult, bint utc)

View File

@@ -0,0 +1,13 @@
import numpy as np
from pandas._typing import npt
def array_strptime(
values: npt.NDArray[np.object_],
fmt: str | None,
exact: bool = ...,
errors: str = ...,
utc: bool = ...,
) -> tuple[np.ndarray, np.ndarray]: ...
# first ndarray is M8[ns], second is object ndarray of tzinfo | None

View File

@@ -0,0 +1,704 @@
"""Strptime-related classes and functions.
TimeRE, _calc_julian_from_U_or_W are vendored
from the standard library, see
https://github.com/python/cpython/blob/main/Lib/_strptime.py
The original module-level docstring follows.
Strptime-related classes and functions.
CLASSES:
LocaleTime -- Discovers and stores locale-specific time information
TimeRE -- Creates regexes for pattern matching a string of text containing
time information
FUNCTIONS:
_getlang -- Figure out what language is being used for the locale
strptime -- Calculates the time struct represented by the passed-in string
"""
from datetime import timezone
from cpython.datetime cimport (
PyDate_Check,
PyDateTime_Check,
date,
import_datetime,
timedelta,
tzinfo,
)
from _strptime import (
TimeRE as _TimeRE,
_getlang,
)
from _strptime import LocaleTime # no-cython-lint
import_datetime()
from _thread import allocate_lock as _thread_allocate_lock
import re
import numpy as np
import pytz
cimport numpy as cnp
from numpy cimport (
int64_t,
ndarray,
)
from pandas._libs.missing cimport checknull_with_nat_and_na
from pandas._libs.tslibs.conversion cimport (
convert_timezone,
get_datetime64_nanos,
)
from pandas._libs.tslibs.nattype cimport (
NPY_NAT,
c_nat_strings as nat_strings,
)
from pandas._libs.tslibs.np_datetime cimport (
NPY_DATETIMEUNIT,
NPY_FR_ns,
check_dts_bounds,
import_pandas_datetime,
npy_datetimestruct,
npy_datetimestruct_to_datetime,
pydate_to_dt64,
pydatetime_to_dt64,
string_to_dts,
)
import_pandas_datetime()
from pandas._libs.tslibs.np_datetime import OutOfBoundsDatetime
from pandas._libs.tslibs.timestamps cimport _Timestamp
from pandas._libs.util cimport (
is_datetime64_object,
is_float_object,
is_integer_object,
)
from pandas._libs.tslibs.timestamps import Timestamp
cnp.import_array()
cdef bint format_is_iso(f: str):
"""
Does format match the iso8601 set that can be handled by the C parser?
Generally of form YYYY-MM-DDTHH:MM:SS - date separator can be different
but must be consistent. Leading 0s in dates and times are optional.
"""
iso_regex = re.compile(
r"""
^ # start of string
%Y # Year
(?:([-/ \\.]?)%m # month with or without separators
(?: \1%d # day with same separator as for year-month
(?:[ T]%H # hour with separator
(?:\:%M # minute with separator
(?:\:%S # second with separator
(?:%z|\.%f(?:%z)? # timezone or fractional second
)?)?)?)?)?)? # optional
$ # end of string
""",
re.VERBOSE,
)
excluded_formats = ["%Y%m"]
return re.match(iso_regex, f) is not None and f not in excluded_formats
def _test_format_is_iso(f: str) -> bool:
"""Only used in testing."""
return format_is_iso(f)
cdef bint parse_today_now(str val, int64_t* iresult, bint utc):
# We delay this check for as long as possible
# because it catches relatively rare cases
# Multiply by 1000 to convert to nanos, since these methods naturally have
# microsecond resolution
if val == "now":
if utc:
iresult[0] = Timestamp.utcnow()._value * 1000
else:
# GH#18705 make sure to_datetime("now") matches Timestamp("now")
# Note using Timestamp.now() is faster than Timestamp("now")
iresult[0] = Timestamp.now()._value * 1000
return True
elif val == "today":
iresult[0] = Timestamp.today()._value * 1000
return True
return False
cdef dict _parse_code_table = {"y": 0,
"Y": 1,
"m": 2,
"B": 3,
"b": 4,
"d": 5,
"H": 6,
"I": 7,
"M": 8,
"S": 9,
"f": 10,
"A": 11,
"a": 12,
"w": 13,
"j": 14,
"U": 15,
"W": 16,
"Z": 17,
"p": 18, # an additional key, only with I
"z": 19,
"G": 20,
"V": 21,
"u": 22}
def array_strptime(
ndarray[object] values,
str fmt,
bint exact=True,
errors="raise",
bint utc=False,
):
"""
Calculates the datetime structs represented by the passed array of strings
Parameters
----------
values : ndarray of string-like objects
fmt : string-like regex
exact : matches must be exact if True, search if False
errors : string specifying error handling, {'raise', 'ignore', 'coerce'}
"""
cdef:
Py_ssize_t i, n = len(values)
npy_datetimestruct dts
int64_t[::1] iresult
object[::1] result_timezone
int year, month, day, minute, hour, second, weekday, julian
int week_of_year, week_of_year_start, parse_code, ordinal
int iso_week, iso_year
int64_t us, ns
object val, group_key, ampm, found, tz
bint is_raise = errors=="raise"
bint is_ignore = errors=="ignore"
bint is_coerce = errors=="coerce"
bint found_naive = False
bint found_tz = False
tzinfo tz_out = None
bint iso_format = format_is_iso(fmt)
NPY_DATETIMEUNIT out_bestunit
int out_local = 0, out_tzoffset = 0
bint string_to_dts_succeeded = 0
assert is_raise or is_ignore or is_coerce
if "%W" in fmt or "%U" in fmt:
if "%Y" not in fmt and "%y" not in fmt:
raise ValueError("Cannot use '%W' or '%U' without day and year")
if "%A" not in fmt and "%a" not in fmt and "%w" not in fmt:
raise ValueError("Cannot use '%W' or '%U' without day and year")
elif "%Z" in fmt and "%z" in fmt:
raise ValueError("Cannot parse both %Z and %z")
elif "%j" in fmt and "%G" in fmt:
raise ValueError("Day of the year directive '%j' is not "
"compatible with ISO year directive '%G'. "
"Use '%Y' instead.")
elif "%G" in fmt and (
"%V" not in fmt
or not (
"%A" in fmt
or "%a" in fmt
or "%w" in fmt
or "%u" in fmt
)
):
raise ValueError("ISO year directive '%G' must be used with "
"the ISO week directive '%V' and a weekday "
"directive '%A', '%a', '%w', or '%u'.")
elif "%V" in fmt and "%Y" in fmt:
raise ValueError("ISO week directive '%V' is incompatible with "
"the year directive '%Y'. Use the ISO year "
"'%G' instead.")
elif "%V" in fmt and (
"%G" not in fmt
or not (
"%A" in fmt
or "%a" in fmt
or "%w" in fmt
or "%u" in fmt
)
):
raise ValueError("ISO week directive '%V' must be used with "
"the ISO year directive '%G' and a weekday "
"directive '%A', '%a', '%w', or '%u'.")
global _TimeRE_cache, _regex_cache
with _cache_lock:
if _getlang() != _TimeRE_cache.locale_time.lang:
_TimeRE_cache = TimeRE()
_regex_cache.clear()
if len(_regex_cache) > _CACHE_MAX_SIZE:
_regex_cache.clear()
locale_time = _TimeRE_cache.locale_time
format_regex = _regex_cache.get(fmt)
if not format_regex:
try:
format_regex = _TimeRE_cache.compile(fmt)
# KeyError raised when a bad format is found; can be specified as
# \\, in which case it was a stray % but with a space after it
except KeyError, err:
bad_directive = err.args[0]
if bad_directive == "\\":
bad_directive = "%"
del err
raise ValueError(f"'{bad_directive}' is a bad directive "
f"in format '{fmt}'")
# IndexError only occurs when the format string is "%"
except IndexError:
raise ValueError(f"stray % in format '{fmt}'")
_regex_cache[fmt] = format_regex
result = np.empty(n, dtype="M8[ns]")
iresult = result.view("i8")
result_timezone = np.empty(n, dtype="object")
dts.us = dts.ps = dts.as = 0
for i in range(n):
val = values[i]
try:
if isinstance(val, str):
if len(val) == 0 or val in nat_strings:
iresult[i] = NPY_NAT
continue
elif checknull_with_nat_and_na(val):
iresult[i] = NPY_NAT
continue
elif PyDateTime_Check(val):
if val.tzinfo is not None:
found_tz = True
else:
found_naive = True
tz_out = convert_timezone(
val.tzinfo,
tz_out,
found_naive,
found_tz,
utc,
)
if isinstance(val, _Timestamp):
iresult[i] = val.tz_localize(None).as_unit("ns")._value
else:
iresult[i] = pydatetime_to_dt64(val.replace(tzinfo=None), &dts)
check_dts_bounds(&dts)
result_timezone[i] = val.tzinfo
continue
elif PyDate_Check(val):
iresult[i] = pydate_to_dt64(val, &dts)
check_dts_bounds(&dts)
continue
elif is_datetime64_object(val):
iresult[i] = get_datetime64_nanos(val, NPY_FR_ns)
continue
elif (
(is_integer_object(val) or is_float_object(val))
and (val != val or val == NPY_NAT)
):
iresult[i] = NPY_NAT
continue
else:
val = str(val)
if fmt == "ISO8601":
string_to_dts_succeeded = not string_to_dts(
val, &dts, &out_bestunit, &out_local,
&out_tzoffset, False, None, False
)
elif iso_format:
string_to_dts_succeeded = not string_to_dts(
val, &dts, &out_bestunit, &out_local,
&out_tzoffset, False, fmt, exact
)
if string_to_dts_succeeded:
# No error reported by string_to_dts, pick back up
# where we left off
value = npy_datetimestruct_to_datetime(NPY_FR_ns, &dts)
if out_local == 1:
# Store the out_tzoffset in seconds
# since we store the total_seconds of
# dateutil.tz.tzoffset objects
tz = timezone(timedelta(minutes=out_tzoffset))
result_timezone[i] = tz
out_local = 0
out_tzoffset = 0
iresult[i] = value
check_dts_bounds(&dts)
continue
if parse_today_now(val, &iresult[i], utc):
continue
# Some ISO formats can't be parsed by string_to_dts
# For example, 6-digit YYYYMD. So, if there's an error, and a format
# was specified, then try the string-matching code below. If the format
# specified was 'ISO8601', then we need to error, because
# only string_to_dts handles mixed ISO8601 formats.
if not string_to_dts_succeeded and fmt == "ISO8601":
raise ValueError(f"Time data {val} is not ISO8601 format")
# exact matching
if exact:
found = format_regex.match(val)
if not found:
raise ValueError(
f"time data \"{val}\" doesn't match format \"{fmt}\""
)
if len(val) != found.end():
raise ValueError(
"unconverted data remains when parsing with "
f"format \"{fmt}\": \"{val[found.end():]}\""
)
# search
else:
found = format_regex.search(val)
if not found:
raise ValueError(
f"time data \"{val}\" doesn't match format \"{fmt}\""
)
iso_year = -1
year = 1900
month = day = 1
hour = minute = second = ns = us = 0
tz = None
# Default to -1 to signify that values not known; not critical to have,
# though
iso_week = week_of_year = -1
week_of_year_start = -1
# weekday and julian defaulted to -1 so as to signal need to calculate
# values
weekday = julian = -1
found_dict = found.groupdict()
for group_key in found_dict.iterkeys():
# Directives not explicitly handled below:
# c, x, X
# handled by making out of other directives
# U, W
# worthless without day of the week
parse_code = _parse_code_table[group_key]
if parse_code == 0:
year = int(found_dict["y"])
# Open Group specification for strptime() states that a %y
# value in the range of [00, 68] is in the century 2000, while
# [69,99] is in the century 1900
if year <= 68:
year += 2000
else:
year += 1900
elif parse_code == 1:
year = int(found_dict["Y"])
elif parse_code == 2:
month = int(found_dict["m"])
# elif group_key == 'B':
elif parse_code == 3:
month = locale_time.f_month.index(found_dict["B"].lower())
# elif group_key == 'b':
elif parse_code == 4:
month = locale_time.a_month.index(found_dict["b"].lower())
# elif group_key == 'd':
elif parse_code == 5:
day = int(found_dict["d"])
# elif group_key == 'H':
elif parse_code == 6:
hour = int(found_dict["H"])
elif parse_code == 7:
hour = int(found_dict["I"])
ampm = found_dict.get("p", "").lower()
# If there was no AM/PM indicator, we'll treat this like AM
if ampm in ("", locale_time.am_pm[0]):
# We're in AM so the hour is correct unless we're
# looking at 12 midnight.
# 12 midnight == 12 AM == hour 0
if hour == 12:
hour = 0
elif ampm == locale_time.am_pm[1]:
# We're in PM so we need to add 12 to the hour unless
# we're looking at 12 noon.
# 12 noon == 12 PM == hour 12
if hour != 12:
hour += 12
elif parse_code == 8:
minute = int(found_dict["M"])
elif parse_code == 9:
second = int(found_dict["S"])
elif parse_code == 10:
s = found_dict["f"]
# Pad to always return nanoseconds
s += "0" * (9 - len(s))
us = long(s)
ns = us % 1000
us = us // 1000
elif parse_code == 11:
weekday = locale_time.f_weekday.index(found_dict["A"].lower())
elif parse_code == 12:
weekday = locale_time.a_weekday.index(found_dict["a"].lower())
elif parse_code == 13:
weekday = int(found_dict["w"])
if weekday == 0:
weekday = 6
else:
weekday -= 1
elif parse_code == 14:
julian = int(found_dict["j"])
elif parse_code == 15 or parse_code == 16:
week_of_year = int(found_dict[group_key])
if group_key == "U":
# U starts week on Sunday.
week_of_year_start = 6
else:
# W starts week on Monday.
week_of_year_start = 0
elif parse_code == 17:
tz = pytz.timezone(found_dict["Z"])
elif parse_code == 19:
tz = parse_timezone_directive(found_dict["z"])
elif parse_code == 20:
iso_year = int(found_dict["G"])
elif parse_code == 21:
iso_week = int(found_dict["V"])
elif parse_code == 22:
weekday = int(found_dict["u"])
weekday -= 1
# If we know the wk of the year and what day of that wk, we can figure
# out the Julian day of the year.
if julian == -1 and weekday != -1:
if week_of_year != -1:
week_starts_Mon = week_of_year_start == 0
julian = _calc_julian_from_U_or_W(year, week_of_year, weekday,
week_starts_Mon)
elif iso_year != -1 and iso_week != -1:
year, julian = _calc_julian_from_V(iso_year, iso_week,
weekday + 1)
# Cannot pre-calculate date() since can change in Julian
# calculation and thus could have different value for the day of the wk
# calculation.
if julian == -1:
# Need to add 1 to result since first day of the year is 1, not
# 0.
ordinal = date(year, month, day).toordinal()
julian = ordinal - date(year, 1, 1).toordinal() + 1
else:
# Assume that if they bothered to include Julian day it will
# be accurate.
datetime_result = date.fromordinal(
(julian - 1) + date(year, 1, 1).toordinal())
year = datetime_result.year
month = datetime_result.month
day = datetime_result.day
if weekday == -1:
weekday = date(year, month, day).weekday()
dts.year = year
dts.month = month
dts.day = day
dts.hour = hour
dts.min = minute
dts.sec = second
dts.us = us
dts.ps = ns * 1000
iresult[i] = npy_datetimestruct_to_datetime(NPY_FR_ns, &dts)
check_dts_bounds(&dts)
result_timezone[i] = tz
except (ValueError, OutOfBoundsDatetime) as ex:
ex.args = (
f"{str(ex)}, at position {i}. You might want to try:\n"
" - passing `format` if your strings have a consistent format;\n"
" - passing `format='ISO8601'` if your strings are "
"all ISO8601 but not necessarily in exactly the same format;\n"
" - passing `format='mixed'`, and the format will be "
"inferred for each element individually. "
"You might want to use `dayfirst` alongside this.",
)
if is_coerce:
iresult[i] = NPY_NAT
continue
elif is_raise:
raise
return values, []
return result, result_timezone.base
class TimeRE(_TimeRE):
"""
Handle conversion from format directives to regexes.
Creates regexes for pattern matching a string of text containing
time information
"""
def __init__(self, locale_time=None):
"""
Create keys/values.
Order of execution is important for dependency reasons.
"""
self._Z = None
super().__init__(locale_time=locale_time)
# GH 48767: Overrides for cpython's TimeRE
# 1) Parse up to nanos instead of micros
self.update({"f": r"(?P<f>[0-9]{1,9})"}),
def __getitem__(self, key):
if key == "Z":
# lazy computation
if self._Z is None:
self._Z = self.__seqToRE(pytz.all_timezones, "Z")
# Note: handling Z is the key difference vs using the stdlib
# _strptime.TimeRE. test_to_datetime_parse_tzname_or_tzoffset with
# fmt='%Y-%m-%d %H:%M:%S %Z' fails with the stdlib version.
return self._Z
return super().__getitem__(key)
_cache_lock = _thread_allocate_lock()
# DO NOT modify _TimeRE_cache or _regex_cache without acquiring the cache lock
# first!
_TimeRE_cache = TimeRE()
_CACHE_MAX_SIZE = 5 # Max number of regexes stored in _regex_cache
_regex_cache = {}
cdef int _calc_julian_from_U_or_W(int year, int week_of_year,
int day_of_week, int week_starts_Mon):
"""
Calculate the Julian day based on the year, week of the year, and day of
the week, with week_start_day representing whether the week of the year
assumes the week starts on Sunday or Monday (6 or 0).
Parameters
----------
year : int
the year
week_of_year : int
week taken from format U or W
week_starts_Mon : int
represents whether the week of the year
assumes the week starts on Sunday or Monday (6 or 0)
Returns
-------
int
converted julian day
"""
cdef:
int first_weekday, week_0_length, days_to_week
first_weekday = date(year, 1, 1).weekday()
# If we are dealing with the %U directive (week starts on Sunday), it's
# easier to just shift the view to Sunday being the first day of the
# week.
if not week_starts_Mon:
first_weekday = (first_weekday + 1) % 7
day_of_week = (day_of_week + 1) % 7
# Need to watch out for a week 0 (when the first day of the year is not
# the same as that specified by %U or %W).
week_0_length = (7 - first_weekday) % 7
if week_of_year == 0:
return 1 + day_of_week - first_weekday
else:
days_to_week = week_0_length + (7 * (week_of_year - 1))
return 1 + days_to_week + day_of_week
cdef (int, int) _calc_julian_from_V(int iso_year, int iso_week, int iso_weekday):
"""
Calculate the Julian day based on the ISO 8601 year, week, and weekday.
ISO weeks start on Mondays, with week 01 being the week containing 4 Jan.
ISO week days range from 1 (Monday) to 7 (Sunday).
Parameters
----------
iso_year : int
the year taken from format %G
iso_week : int
the week taken from format %V
iso_weekday : int
weekday taken from format %u
Returns
-------
(int, int)
the iso year and the Gregorian ordinal date / julian date
"""
cdef:
int correction, ordinal
correction = date(iso_year, 1, 4).isoweekday() + 3
ordinal = (iso_week * 7) + iso_weekday - correction
# ordinal may be negative or 0 now, which means the date is in the previous
# calendar year
if ordinal < 1:
ordinal += date(iso_year, 1, 1).toordinal()
iso_year -= 1
ordinal -= date(iso_year, 1, 1).toordinal()
return iso_year, ordinal
cdef tzinfo parse_timezone_directive(str z):
"""
Parse the '%z' directive and return a datetime.timezone object.
Parameters
----------
z : string of the UTC offset
Returns
-------
datetime.timezone
Notes
-----
This is essentially similar to the cpython implementation
https://github.com/python/cpython/blob/master/Lib/_strptime.py#L457-L479
"""
cdef:
int hours, minutes, seconds, pad_number, microseconds
int total_minutes
object gmtoff_remainder, gmtoff_remainder_padding
if z == "Z":
return timezone(timedelta(0))
if z[3] == ":":
z = z[:3] + z[4:]
if len(z) > 5:
if z[5] != ":":
raise ValueError(f"Inconsistent use of : in {z}")
z = z[:5] + z[6:]
hours = int(z[1:3])
minutes = int(z[3:5])
seconds = int(z[5:7] or 0)
# Pad to always return microseconds.
gmtoff_remainder = z[8:]
pad_number = 6 - len(gmtoff_remainder)
gmtoff_remainder_padding = "0" * pad_number
microseconds = int(gmtoff_remainder + gmtoff_remainder_padding)
total_minutes = ((hours * 60) + minutes + (seconds // 60) +
(microseconds // 60_000_000))
total_minutes = -total_minutes if z.startswith("-") else total_minutes
return timezone(timedelta(minutes=total_minutes))

View File

@@ -0,0 +1,28 @@
from cpython.datetime cimport timedelta
from numpy cimport int64_t
from .np_datetime cimport NPY_DATETIMEUNIT
# Exposed for tslib, not intended for outside use.
cpdef int64_t delta_to_nanoseconds(
delta, NPY_DATETIMEUNIT reso=*, bint round_ok=*
) except? -1
cdef convert_to_timedelta64(object ts, str unit)
cdef bint is_any_td_scalar(object obj)
cdef class _Timedelta(timedelta):
cdef readonly:
int64_t _value # nanoseconds
bint _is_populated # are my components populated
int64_t _d, _h, _m, _s, _ms, _us, _ns
NPY_DATETIMEUNIT _creso
cpdef timedelta to_pytimedelta(_Timedelta self)
cdef bint _has_ns(self)
cdef bint _is_in_pytimedelta_bounds(self)
cdef _ensure_components(_Timedelta self)
cdef bint _compare_mismatched_resos(self, _Timedelta other, op)
cdef _Timedelta _as_creso(self, NPY_DATETIMEUNIT reso, bint round_ok=*)
cpdef _maybe_cast_to_matching_resos(self, _Timedelta other)

View File

@@ -0,0 +1,169 @@
from datetime import timedelta
from typing import (
ClassVar,
Literal,
TypeAlias,
TypeVar,
overload,
)
import numpy as np
from pandas._libs.tslibs import (
NaTType,
Tick,
)
from pandas._typing import (
Self,
npt,
)
# This should be kept consistent with the keys in the dict timedelta_abbrevs
# in pandas/_libs/tslibs/timedeltas.pyx
UnitChoices: TypeAlias = Literal[
"Y",
"y",
"M",
"W",
"w",
"D",
"d",
"days",
"day",
"hours",
"hour",
"hr",
"h",
"m",
"minute",
"min",
"minutes",
"T",
"t",
"s",
"seconds",
"sec",
"second",
"ms",
"milliseconds",
"millisecond",
"milli",
"millis",
"L",
"l",
"us",
"microseconds",
"microsecond",
"µs",
"micro",
"micros",
"u",
"ns",
"nanoseconds",
"nano",
"nanos",
"nanosecond",
"n",
]
_S = TypeVar("_S", bound=timedelta)
def ints_to_pytimedelta(
arr: npt.NDArray[np.timedelta64],
box: bool = ...,
) -> npt.NDArray[np.object_]: ...
def array_to_timedelta64(
values: npt.NDArray[np.object_],
unit: str | None = ...,
errors: str = ...,
) -> np.ndarray: ... # np.ndarray[m8ns]
def parse_timedelta_unit(unit: str | None) -> UnitChoices: ...
def delta_to_nanoseconds(
delta: np.timedelta64 | timedelta | Tick,
reso: int = ..., # NPY_DATETIMEUNIT
round_ok: bool = ...,
) -> int: ...
def floordiv_object_array(
left: np.ndarray, right: npt.NDArray[np.object_]
) -> np.ndarray: ...
def truediv_object_array(
left: np.ndarray, right: npt.NDArray[np.object_]
) -> np.ndarray: ...
# Stub for the C-implemented pandas Timedelta scalar (subclass of
# datetime.timedelta with nanosecond support and unit ("reso") handling).
class Timedelta(timedelta):
    # NPY_DATETIMEUNIT code of the stored resolution.
    _creso: int
    min: ClassVar[Timedelta]
    max: ClassVar[Timedelta]
    resolution: ClassVar[Timedelta]
    value: int  # np.int64
    _value: int  # np.int64
    # error: "__new__" must return a class instance (got "Union[Timestamp, NaTType]")
    # NaTType is a possible return because Timedelta(nan)/Timedelta("NaT")
    # produce NaT rather than a Timedelta instance.
    def __new__(  # type: ignore[misc]
        cls: type[_S],
        value=...,
        unit: str | None = ...,
        **kwargs: float | np.integer | np.floating,
    ) -> _S | NaTType: ...
    # Alternate constructor from a raw i8 value plus a resolution code.
    @classmethod
    def _from_value_and_reso(cls, value: np.int64, reso: int) -> Timedelta: ...
    @property
    def days(self) -> int: ...
    @property
    def seconds(self) -> int: ...
    @property
    def microseconds(self) -> int: ...
    def total_seconds(self) -> float: ...
    def to_pytimedelta(self) -> timedelta: ...
    def to_timedelta64(self) -> np.timedelta64: ...
    @property
    def asm8(self) -> np.timedelta64: ...
    # TODO: round/floor/ceil could return NaT?
    def round(self, freq: str) -> Self: ...
    def floor(self, freq: str) -> Self: ...
    def ceil(self, freq: str) -> Self: ...
    @property
    def resolution_string(self) -> str: ...
    def __add__(self, other: timedelta) -> Timedelta: ...
    def __radd__(self, other: timedelta) -> Timedelta: ...
    def __sub__(self, other: timedelta) -> Timedelta: ...
    def __rsub__(self, other: timedelta) -> Timedelta: ...
    def __neg__(self) -> Timedelta: ...
    def __pos__(self) -> Timedelta: ...
    def __abs__(self) -> Timedelta: ...
    def __mul__(self, other: float) -> Timedelta: ...
    def __rmul__(self, other: float) -> Timedelta: ...
    # error: Signature of "__floordiv__" incompatible with supertype "timedelta"
    # td // td -> int, td // scalar -> Timedelta, td // td-array -> intp array,
    # td // numeric-array -> timedelta64 array.
    @overload  # type: ignore[override]
    def __floordiv__(self, other: timedelta) -> int: ...
    @overload
    def __floordiv__(self, other: float) -> Timedelta: ...
    @overload
    def __floordiv__(
        self, other: npt.NDArray[np.timedelta64]
    ) -> npt.NDArray[np.intp]: ...
    @overload
    def __floordiv__(
        self, other: npt.NDArray[np.number]
    ) -> npt.NDArray[np.timedelta64] | Timedelta: ...
    @overload
    def __rfloordiv__(self, other: timedelta | str) -> int: ...
    @overload
    def __rfloordiv__(self, other: None | NaTType) -> NaTType: ...
    @overload
    def __rfloordiv__(self, other: np.ndarray) -> npt.NDArray[np.timedelta64]: ...
    @overload
    def __truediv__(self, other: timedelta) -> float: ...
    @overload
    def __truediv__(self, other: float) -> Timedelta: ...
    def __mod__(self, other: timedelta) -> Timedelta: ...
    def __divmod__(self, other: timedelta) -> tuple[int, Timedelta]: ...
    def __le__(self, other: timedelta) -> bool: ...
    def __lt__(self, other: timedelta) -> bool: ...
    def __ge__(self, other: timedelta) -> bool: ...
    def __gt__(self, other: timedelta) -> bool: ...
    def __hash__(self) -> int: ...
    def isoformat(self) -> str: ...
    def to_numpy(self) -> np.timedelta64: ...
    def view(self, dtype: npt.DTypeLike = ...) -> object: ...
    # String abbreviation of the stored resolution, e.g. "ns".
    @property
    def unit(self) -> str: ...
    def as_unit(self, unit: str, round_ok: bool = ...) -> Timedelta: ...

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,36 @@
from cpython.datetime cimport (
datetime,
tzinfo,
)
from numpy cimport int64_t
from pandas._libs.tslibs.base cimport ABCTimestamp
from pandas._libs.tslibs.np_datetime cimport (
NPY_DATETIMEUNIT,
npy_datetimestruct,
)
from pandas._libs.tslibs.offsets cimport BaseOffset
# Factory for a _Timestamp from an i8 value plus its broken-down
# npy_datetimestruct fields; `reso=*` means the resolution has a default
# defined in the .pyx implementation.
cdef _Timestamp create_timestamp_from_ts(int64_t value,
                                         npy_datetimestruct dts,
                                         tzinfo tz,
                                         bint fold,
                                         NPY_DATETIMEUNIT reso=*)
# C-level declaration of the Timestamp base class; bodies live in timestamps.pyx.
cdef class _Timestamp(ABCTimestamp):
    cdef readonly:
        int64_t _value, nanosecond, year
        NPY_DATETIMEUNIT _creso  # resolution code of _value

    cdef bint _get_start_end_field(self, str field, freq)
    cdef _get_date_name_field(self, str field, object locale)
    cdef int64_t _maybe_convert_value_to_local(self)
    cdef bint _can_compare(self, datetime other)
    cpdef to_datetime64(self)
    cpdef datetime to_pydatetime(_Timestamp self, bint warn=*)
    # except -1 lets comparison errors propagate from C code.
    cdef bint _compare_outside_nanorange(_Timestamp self, datetime other,
                                         int op) except -1
    cdef bint _compare_mismatched_resos(_Timestamp self, _Timestamp other, int op)
    cdef _Timestamp _as_creso(_Timestamp self, NPY_DATETIMEUNIT creso, bint round_ok=*)

View File

@@ -0,0 +1,240 @@
from datetime import (
date as _date,
datetime,
time as _time,
timedelta,
tzinfo as _tzinfo,
)
from time import struct_time
from typing import (
ClassVar,
TypeVar,
overload,
)
import numpy as np
from pandas._libs.tslibs import (
BaseOffset,
NaTType,
Period,
Tick,
Timedelta,
)
from pandas._typing import (
Self,
TimestampNonexistent,
)
_DatetimeT = TypeVar("_DatetimeT", bound=datetime)
# Build (not raise) the TypeError used when integer arithmetic is attempted on obj.
def integer_op_not_supported(obj: object) -> TypeError: ...
# Stub for the C-implemented pandas Timestamp scalar (subclass of
# datetime.datetime with nanosecond precision and unit ("reso") handling).
class Timestamp(datetime):
    # NPY_DATETIMEUNIT code of the stored resolution.
    _creso: int
    min: ClassVar[Timestamp]
    max: ClassVar[Timestamp]
    resolution: ClassVar[Timedelta]
    _value: int  # np.int64
    # error: "__new__" must return a class instance (got "Union[Timestamp, NaTType]")
    # NaTType is possible because Timestamp(nan)/Timestamp("NaT") produce NaT.
    def __new__(  # type: ignore[misc]
        cls: type[_DatetimeT],
        ts_input: np.integer | float | str | _date | datetime | np.datetime64 = ...,
        year: int | None = ...,
        month: int | None = ...,
        day: int | None = ...,
        hour: int | None = ...,
        minute: int | None = ...,
        second: int | None = ...,
        microsecond: int | None = ...,
        tzinfo: _tzinfo | None = ...,
        *,
        nanosecond: int | None = ...,
        tz: str | _tzinfo | None | int = ...,
        unit: str | int | None = ...,
        fold: int | None = ...,
    ) -> _DatetimeT | NaTType: ...
    # Alternate constructor from a raw i8 value, resolution code, and tz.
    @classmethod
    def _from_value_and_reso(
        cls, value: int, reso: int, tz: _tzinfo | None
    ) -> Timestamp: ...
    @property
    def value(self) -> int: ...  # np.int64
    @property
    def year(self) -> int: ...
    @property
    def month(self) -> int: ...
    @property
    def day(self) -> int: ...
    @property
    def hour(self) -> int: ...
    @property
    def minute(self) -> int: ...
    @property
    def second(self) -> int: ...
    @property
    def microsecond(self) -> int: ...
    @property
    def nanosecond(self) -> int: ...
    @property
    def tzinfo(self) -> _tzinfo | None: ...
    # Alias for tzinfo.
    @property
    def tz(self) -> _tzinfo | None: ...
    @property
    def fold(self) -> int: ...
    @classmethod
    def fromtimestamp(cls, ts: float, tz: _tzinfo | None = ...) -> Self: ...
    @classmethod
    def utcfromtimestamp(cls, ts: float) -> Self: ...
    @classmethod
    def today(cls, tz: _tzinfo | str | None = ...) -> Self: ...
    @classmethod
    def fromordinal(
        cls,
        ordinal: int,
        tz: _tzinfo | str | None = ...,
    ) -> Self: ...
    @classmethod
    def now(cls, tz: _tzinfo | str | None = ...) -> Self: ...
    @classmethod
    def utcnow(cls) -> Self: ...
    # error: Signature of "combine" incompatible with supertype "datetime"
    @classmethod
    def combine(  # type: ignore[override]
        cls, date: _date, time: _time
    ) -> datetime: ...
    @classmethod
    def fromisoformat(cls, date_string: str) -> Self: ...
    def strftime(self, format: str) -> str: ...
    def __format__(self, fmt: str) -> str: ...
    def toordinal(self) -> int: ...
    def timetuple(self) -> struct_time: ...
    def timestamp(self) -> float: ...
    def utctimetuple(self) -> struct_time: ...
    def date(self) -> _date: ...
    def time(self) -> _time: ...
    def timetz(self) -> _time: ...
    # LSP violation: nanosecond is not present in datetime.datetime.replace
    # and has positional args following it
    def replace(  # type: ignore[override]
        self,
        year: int | None = ...,
        month: int | None = ...,
        day: int | None = ...,
        hour: int | None = ...,
        minute: int | None = ...,
        second: int | None = ...,
        microsecond: int | None = ...,
        nanosecond: int | None = ...,
        tzinfo: _tzinfo | type[object] | None = ...,
        fold: int | None = ...,
    ) -> Self: ...
    # LSP violation: datetime.datetime.astimezone has a default value for tz
    def astimezone(self, tz: _tzinfo | None) -> Self: ...  # type: ignore[override]
    def ctime(self) -> str: ...
    def isoformat(self, sep: str = ..., timespec: str = ...) -> str: ...
    @classmethod
    def strptime(
        # Note: strptime is actually disabled and raises NotImplementedError
        cls,
        date_string: str,
        format: str,
    ) -> Self: ...
    def utcoffset(self) -> timedelta | None: ...
    def tzname(self) -> str | None: ...
    def dst(self) -> timedelta | None: ...
    # Comparisons are narrowed to datetime operands.
    def __le__(self, other: datetime) -> bool: ...  # type: ignore[override]
    def __lt__(self, other: datetime) -> bool: ...  # type: ignore[override]
    def __ge__(self, other: datetime) -> bool: ...  # type: ignore[override]
    def __gt__(self, other: datetime) -> bool: ...  # type: ignore[override]
    # error: Signature of "__add__" incompatible with supertype "date"/"datetime"
    @overload  # type: ignore[override]
    def __add__(self, other: np.ndarray) -> np.ndarray: ...
    @overload
    def __add__(self, other: timedelta | np.timedelta64 | Tick) -> Self: ...
    def __radd__(self, other: timedelta) -> Self: ...
    # ts - ts -> Timedelta; ts - td -> Timestamp.
    @overload  # type: ignore[override]
    def __sub__(self, other: datetime) -> Timedelta: ...
    @overload
    def __sub__(self, other: timedelta | np.timedelta64 | Tick) -> Self: ...
    def __hash__(self) -> int: ...
    def weekday(self) -> int: ...
    def isoweekday(self) -> int: ...
    # Return type "Tuple[int, int, int]" of "isocalendar" incompatible with return
    # type "_IsoCalendarDate" in supertype "date"
    def isocalendar(self) -> tuple[int, int, int]: ...  # type: ignore[override]
    @property
    def is_leap_year(self) -> bool: ...
    @property
    def is_month_start(self) -> bool: ...
    @property
    def is_quarter_start(self) -> bool: ...
    @property
    def is_year_start(self) -> bool: ...
    @property
    def is_month_end(self) -> bool: ...
    @property
    def is_quarter_end(self) -> bool: ...
    @property
    def is_year_end(self) -> bool: ...
    def to_pydatetime(self, warn: bool = ...) -> datetime: ...
    def to_datetime64(self) -> np.datetime64: ...
    def to_period(self, freq: BaseOffset | str = ...) -> Period: ...
    def to_julian_date(self) -> np.float64: ...
    @property
    def asm8(self) -> np.datetime64: ...
    def tz_convert(self, tz: _tzinfo | str | None) -> Self: ...
    # TODO: could return NaT?
    def tz_localize(
        self,
        tz: _tzinfo | str | None,
        ambiguous: str = ...,
        nonexistent: TimestampNonexistent = ...,
    ) -> Self: ...
    def normalize(self) -> Self: ...
    # TODO: round/floor/ceil could return NaT?
    def round(
        self,
        freq: str,
        ambiguous: bool | str = ...,
        nonexistent: TimestampNonexistent = ...,
    ) -> Self: ...
    def floor(
        self,
        freq: str,
        ambiguous: bool | str = ...,
        nonexistent: TimestampNonexistent = ...,
    ) -> Self: ...
    def ceil(
        self,
        freq: str,
        ambiguous: bool | str = ...,
        nonexistent: TimestampNonexistent = ...,
    ) -> Self: ...
    def day_name(self, locale: str | None = ...) -> str: ...
    def month_name(self, locale: str | None = ...) -> str: ...
    @property
    def day_of_week(self) -> int: ...
    @property
    def dayofweek(self) -> int: ...
    @property
    def day_of_year(self) -> int: ...
    @property
    def dayofyear(self) -> int: ...
    @property
    def quarter(self) -> int: ...
    @property
    def week(self) -> int: ...
    def to_numpy(
        self, dtype: np.dtype | None = ..., copy: bool = ...
    ) -> np.datetime64: ...
    @property
    def _date_repr(self) -> str: ...
    @property
    def days_in_month(self) -> int: ...
    @property
    def daysinmonth(self) -> int: ...
    # String abbreviation of the stored resolution, e.g. "ns".
    @property
    def unit(self) -> str: ...
    def as_unit(self, unit: str, round_ok: bool = ...) -> Timestamp: ...

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,23 @@
from cpython.datetime cimport (
datetime,
timedelta,
tzinfo,
)
# C-level exports of timezones.pyx: UTC singleton plus tz-classification
# and tz-introspection helpers. cpdef names are also importable from Python.
cdef tzinfo utc_stdlib  # datetime.timezone.utc singleton
cpdef bint is_utc(tzinfo tz)
cdef bint is_tzlocal(tzinfo tz)
cdef bint is_zoneinfo(tzinfo tz)
cdef bint treat_tz_as_pytz(tzinfo tz)
cpdef bint tz_compare(tzinfo start, tzinfo end)
cpdef object get_timezone(tzinfo tz)
cpdef tzinfo maybe_get_tz(object tz)
cdef timedelta get_utcoffset(tzinfo tz, datetime obj)
cpdef bint is_fixed_offset(tzinfo tz)
cdef object get_dst_info(tzinfo tz)  # returns (trans, deltas, typ)

View File

@@ -0,0 +1,21 @@
from datetime import (
datetime,
tzinfo,
)
from typing import Callable
import numpy as np
# imported from dateutil.tz
dateutil_gettz: Callable[[str], tzinfo]

# Canonicalize a pytz tz object (see timezones.pyx for semantics).
def tz_standardize(tz: tzinfo) -> tzinfo: ...

# Equality for tz objects that treats equivalent representations as equal.
def tz_compare(start: tzinfo | None, end: tzinfo | None) -> bool: ...

# Infer a shared tzinfo from two optional endpoints.
def infer_tzinfo(
    start: datetime | None,
    end: datetime | None,
) -> tzinfo | None: ...

# Construct a tzinfo from a string/int/tzinfo input, or pass it through.
def maybe_get_tz(tz: str | int | np.int64 | tzinfo | None) -> tzinfo | None: ...

# Return a serializable identifier (str) or the tz object itself.
def get_timezone(tz: tzinfo) -> tzinfo | str: ...
def is_utc(tz: tzinfo | None) -> bool: ...
def is_fixed_offset(tz: tzinfo) -> bool: ...

View File

@@ -0,0 +1,449 @@
from datetime import (
timedelta,
timezone,
)
from pandas.compat._optional import import_optional_dependency
# zoneinfo is stdlib only on py39+; fall back to None sentinels so the
# is_zoneinfo/is_utc_zoneinfo helpers below can short-circuit.
try:
    # py39+
    import zoneinfo
    from zoneinfo import ZoneInfo
except ImportError:
    zoneinfo = None
    ZoneInfo = None
from cpython.datetime cimport (
datetime,
timedelta,
tzinfo,
)
# dateutil compat
from dateutil.tz import (
gettz as dateutil_gettz,
tzfile as _dateutil_tzfile,
tzlocal as _dateutil_tzlocal,
tzutc as _dateutil_tzutc,
)
import numpy as np
import pytz
from pytz.tzinfo import BaseTzInfo as _pytz_BaseTzInfo
cimport numpy as cnp
from numpy cimport int64_t
cnp.import_array()
# ----------------------------------------------------------------------
from pandas._libs.tslibs.util cimport (
get_nat,
is_integer_object,
)
# Module-level singletons compared against by identity in is_utc below.
cdef int64_t NPY_NAT = get_nat()
cdef tzinfo utc_stdlib = timezone.utc
cdef tzinfo utc_pytz = pytz.utc
cdef tzinfo utc_dateutil_str = dateutil_gettz("UTC")  # NB: *not* the same as tzutc()
cdef tzinfo utc_zoneinfo = None  # lazily populated by is_utc_zoneinfo
# ----------------------------------------------------------------------
cdef bint is_utc_zoneinfo(tzinfo tz):
    """Check by identity whether tz is the cached ZoneInfo("UTC") singleton."""
    # Workaround for cases with missing tzdata
    # https://github.com/pandas-dev/pandas/pull/46425#discussion_r830633025
    if tz is None or zoneinfo is None:
        return False

    global utc_zoneinfo
    if utc_zoneinfo is None:
        # Lazily construct and cache the singleton on first use.
        try:
            utc_zoneinfo = ZoneInfo("UTC")
        except zoneinfo.ZoneInfoNotFoundError:
            return False
        # Warn if tzdata is too old, even if there is a system tzdata to alert
        # users about the mismatch between local/system tzdata
        import_optional_dependency("tzdata", errors="warn", min_version="2022.1")

    return tz is utc_zoneinfo
cpdef inline bint is_utc(tzinfo tz):
    """
    True if tz is any recognized UTC representation: pytz.utc, stdlib
    timezone.utc, dateutil tzutc/gettz("UTC"), or ZoneInfo("UTC").
    """
    return (
        tz is utc_pytz
        or tz is utc_stdlib
        or isinstance(tz, _dateutil_tzutc)
        or tz is utc_dateutil_str
        or is_utc_zoneinfo(tz)
    )
cdef bint is_zoneinfo(tzinfo tz):
    """True if tz is a zoneinfo.ZoneInfo instance (always False pre-py39)."""
    if ZoneInfo is None:
        return False
    return isinstance(tz, ZoneInfo)
cdef bint is_tzlocal(tzinfo tz):
    """True if tz is dateutil's system-local timezone (tzlocal)."""
    return isinstance(tz, _dateutil_tzlocal)
cdef bint treat_tz_as_pytz(tzinfo tz):
    """Duck-type check for pytz timezones via their private transition tables."""
    return (hasattr(tz, "_utc_transition_times") and
            hasattr(tz, "_transition_info"))
cdef bint treat_tz_as_dateutil(tzinfo tz):
    """Duck-type check for dateutil timezones via their private transition lists."""
    return hasattr(tz, "_trans_list") and hasattr(tz, "_trans_idx")
# Returns str or tzinfo object
cpdef inline object get_timezone(tzinfo tz):
    """
    We need to do several things here:
    1) Distinguish between pytz and dateutil timezones
    2) Not be over-specific (e.g. US/Eastern with/without DST is same *zone*
       but a different tz object)
    3) Provide something to serialize when we're storing a datetime object
       in pytables.

    We return a string prefaced with dateutil if it's a dateutil tz, else just
    the tz name. It needs to be a string so that we can serialize it with
    UJSON/pytables. maybe_get_tz (below) is the inverse of this process.

    Raises
    ------
    TypeError
        If tz is None.
    ValueError
        For dateutil tzfiles whose _filename is a bare tarball name (a known
        dateutil-on-Windows bug that makes all zones indistinguishable).
    """
    if tz is None:
        raise TypeError("tz argument cannot be None")
    if is_utc(tz):
        # UTC objects are returned as-is (identity-comparable singletons).
        return tz
    else:
        if treat_tz_as_dateutil(tz):
            if ".tar.gz" in tz._filename:
                raise ValueError(
                    "Bad tz filename. Dateutil on python 3 on windows has a "
                    "bug which causes tzfile._filename to be the same for all "
                    "timezone files. Please construct dateutil timezones "
                    'implicitly by passing a string like "dateutil/Europe'
                    '/London" when you construct your pandas objects instead '
                    "of passing a timezone object. See "
                    "https://github.com/pandas-dev/pandas/pull/7362")
            return "dateutil/" + tz._filename
        else:
            # tz is a pytz timezone or unknown.
            try:
                zone = tz.zone
                if zone is None:
                    return tz
                return zone
            except AttributeError:
                # Not pytz-like; return the object itself.
                return tz
cpdef inline tzinfo maybe_get_tz(object tz):
    """
    (Maybe) Construct a timezone object from a string. If tz is a string, use
    it to construct a timezone object. Otherwise, just return tz.

    Accepted strings: "tzlocal()", "dateutil/<zone>", fixed offsets like
    "+01:30" / "UTC+01:30", "UTC"/"utc", or a pytz zone name. Integers are
    treated as a fixed offset in seconds.
    """
    if isinstance(tz, str):
        if tz == "tzlocal()":
            tz = _dateutil_tzlocal()
        elif tz.startswith("dateutil/"):
            zone = tz[9:]
            tz = dateutil_gettz(zone)
            # On Python 3 on Windows, the filename is not always set correctly.
            if isinstance(tz, _dateutil_tzfile) and ".tar.gz" in tz._filename:
                tz._filename = zone
        elif tz[0] in {"-", "+"}:
            # e.g. "+01:30": hours from chars 0-2, minutes from chars 4-5,
            # with the sign re-applied to the minutes.
            hours = int(tz[0:3])
            minutes = int(tz[0] + tz[4:6])
            tz = timezone(timedelta(hours=hours, minutes=minutes))
        elif tz[0:4] in {"UTC-", "UTC+"}:
            # e.g. "UTC+01:30": same parse shifted past the "UTC" prefix.
            hours = int(tz[3:6])
            minutes = int(tz[3] + tz[7:9])
            tz = timezone(timedelta(hours=hours, minutes=minutes))
        elif tz == "UTC" or tz == "utc":
            tz = utc_stdlib
        else:
            tz = pytz.timezone(tz)
    elif is_integer_object(tz):
        # Integer offset in seconds.
        tz = timezone(timedelta(seconds=tz))
    elif isinstance(tz, tzinfo):
        pass
    elif tz is None:
        pass
    else:
        raise TypeError(type(tz))
    return tz
def _p_tz_cache_key(tz: tzinfo):
    """
    Python-visible wrapper around the C-level ``tz_cache_key`` so that
    tests can exercise the cache-key logic directly.
    """
    key = tz_cache_key(tz)
    return key
# Timezone data caches, key is the pytz string or dateutil file name.
# Populated lazily by get_dst_info; maps key -> (trans, deltas, typ).
dst_cache = {}
cdef object tz_cache_key(tzinfo tz):
    """
    Return the key in the cache for the timezone info object or None
    if unknown.

    The key is currently the tz string for pytz timezones, the filename for
    dateutil timezones.

    Notes
    -----
    This cannot just be the hash of a timezone object. Unfortunately, the
    hashes of two dateutil tz objects which represent the same timezone are
    not equal (even though the tz objects will compare equal and represent
    the same tz file). Also, pytz objects are not always hashable so we use
    str(tz) instead.

    NOTE(review): the "dateutil" prefix here has no trailing slash, unlike
    get_timezone's "dateutil/" — intentional, since _filename is an absolute
    path; the two namespaces never need to round-trip through each other.
    """
    if isinstance(tz, _pytz_BaseTzInfo):
        return tz.zone
    elif isinstance(tz, _dateutil_tzfile):
        if ".tar.gz" in tz._filename:
            raise ValueError("Bad tz filename. Dateutil on python 3 on "
                             "windows has a bug which causes tzfile._filename "
                             "to be the same for all timezone files. Please "
                             "construct dateutil timezones implicitly by "
                             'passing a string like "dateutil/Europe/London" '
                             "when you construct your pandas objects instead "
                             "of passing a timezone object. See "
                             "https://github.com/pandas-dev/pandas/pull/7362")
        return "dateutil" + tz._filename
    else:
        # Unknown tz flavor: not cacheable.
        return None
# ----------------------------------------------------------------------
# UTC Offsets
cdef timedelta get_utcoffset(tzinfo tz, datetime obj):
    """
    UTC offset of tz at obj, preferring the fixed ``_utcoffset`` attribute
    (pytz static zones) over the potentially slower utcoffset(obj) call.
    """
    try:
        return tz._utcoffset
    except AttributeError:
        return tz.utcoffset(obj)
cpdef inline bint is_fixed_offset(tzinfo tz):
    """
    True if tz has a constant UTC offset (no DST transition table).
    dateutil/pytz zones are fixed iff their transition tables are empty;
    ZoneInfo is never treated as fixed; anything else (e.g. stdlib
    datetime.timezone) is assumed fixed.
    """
    if treat_tz_as_dateutil(tz):
        if len(tz._trans_idx) == 0 and len(tz._trans_list) == 0:
            return 1
        else:
            return 0
    elif treat_tz_as_pytz(tz):
        if (len(tz._transition_info) == 0
                and len(tz._utc_transition_times) == 0):
            return 1
        else:
            return 0
    elif is_zoneinfo(tz):
        return 0
    # This also implicitly accepts datetime.timezone objects which are
    # considered fixed
    return 1
cdef object _get_utc_trans_times_from_dateutil_tz(tzinfo tz):
    """
    Transition times in dateutil timezones are stored in local non-dst
    time. This code converts them to UTC. It's the reverse of the code
    in dateutil.tz.tzfile.__init__.
    """
    new_trans = list(tz._trans_list)
    last_std_offset = 0
    for i, (trans, tti) in enumerate(zip(tz._trans_list, tz._trans_idx)):
        # Track the most recent standard (non-DST) offset and subtract it
        # to shift each local transition instant back to UTC.
        if not tti.isdst:
            last_std_offset = tti.offset
        new_trans[i] = trans - last_std_offset
    return new_trans
cdef int64_t[::1] unbox_utcoffsets(object transinfo):
    """
    Convert pytz ``_transition_info`` entries (whose first element is the
    UTC-offset timedelta) into an int64 memoryview of nanosecond offsets.
    """
    cdef:
        Py_ssize_t i
        cnp.npy_intp sz
        int64_t[::1] arr

    sz = len(transinfo)
    arr = cnp.PyArray_EMPTY(1, &sz, cnp.NPY_INT64, 0)

    for i in range(sz):
        # seconds -> nanoseconds
        arr[i] = int(transinfo[i][0].total_seconds()) * 1_000_000_000

    return arr
# ----------------------------------------------------------------------
# Daylight Savings
cdef object get_dst_info(tzinfo tz):
    """
    Returns
    -------
    ndarray[int64_t]
        Nanosecond UTC times of DST transitions.
    ndarray[int64_t]
        Nanosecond UTC offsets corresponding to DST transitions.
    str
        Describing the type of tzinfo object.

    Results for cacheable zones are memoized in the module-level dst_cache.
    """
    cache_key = tz_cache_key(tz)
    if cache_key is None:
        # e.g. pytz.FixedOffset, matplotlib.dates._UTC,
        # psycopg2.tz.FixedOffsetTimezone
        num = int(get_utcoffset(tz, None).total_seconds()) * 1_000_000_000
        # If we have e.g. ZoneInfo here, the get_utcoffset call will return None,
        # so the total_seconds() call will raise AttributeError.
        return (np.array([NPY_NAT + 1], dtype=np.int64),
                np.array([num], dtype=np.int64),
                "unknown")

    if cache_key not in dst_cache:
        if treat_tz_as_pytz(tz):
            trans = np.array(tz._utc_transition_times, dtype="M8[ns]")
            trans = trans.view("i8")
            # Year-1 sentinel entry marks "before first transition".
            if tz._utc_transition_times[0].year == 1:
                trans[0] = NPY_NAT + 1
            deltas = unbox_utcoffsets(tz._transition_info)
            typ = "pytz"
        elif treat_tz_as_dateutil(tz):
            if len(tz._trans_list):
                # get utc trans times
                trans_list = _get_utc_trans_times_from_dateutil_tz(tz)
                trans = np.hstack([
                    np.array([0], dtype="M8[s]"),  # place holder for 1st item
                    np.array(trans_list, dtype="M8[s]")]).astype(
                    "M8[ns]")  # all trans listed
                trans = trans.view("i8")
                trans[0] = NPY_NAT + 1

                # deltas: _ttinfo_before supplies the pre-first-transition offset.
                deltas = np.array([v.offset for v in (
                    tz._ttinfo_before,) + tz._trans_idx], dtype="i8")
                deltas *= 1_000_000_000
                typ = "dateutil"
            elif is_fixed_offset(tz):
                trans = np.array([NPY_NAT + 1], dtype=np.int64)
                deltas = np.array([tz._ttinfo_std.offset],
                                  dtype="i8") * 1_000_000_000
                typ = "fixed"
            else:
                # 2018-07-12 this is not reached in the tests, and this case
                # is not handled in any of the functions that call
                # get_dst_info. If this case _were_ hit the calling
                # functions would then hit an IndexError because they assume
                # `deltas` is non-empty.
                # (under the just-deleted code that returned empty arrays)
                raise AssertionError("dateutil tzinfo is not a FixedOffset "
                                     "and has an empty `_trans_list`.", tz)
        else:
            # static tzinfo, we can get here with pytz.StaticTZInfo
            # which are not caught by treat_tz_as_pytz
            trans = np.array([NPY_NAT + 1], dtype=np.int64)
            num = int(get_utcoffset(tz, None).total_seconds()) * 1_000_000_000
            deltas = np.array([num], dtype=np.int64)
            typ = "static"

        dst_cache[cache_key] = (trans, deltas, typ)

    return dst_cache[cache_key]
def infer_tzinfo(datetime start, datetime end):
    """
    Infer a common tzinfo from two optional datetimes.

    If both are given they must have comparable timezones (per tz_compare),
    otherwise AssertionError is raised; if only one is given its tzinfo wins;
    if neither, return None.
    """
    if start is not None and end is not None:
        tz = start.tzinfo
        if not tz_compare(tz, end.tzinfo):
            raise AssertionError(f"Inputs must both have the same timezone, "
                                 f"{tz} != {end.tzinfo}")
    elif start is not None:
        tz = start.tzinfo
    elif end is not None:
        tz = end.tzinfo
    else:
        tz = None
    return tz
cpdef bint tz_compare(tzinfo start, tzinfo end):
    """
    Compare string representations of timezones

    The same timezone can be represented as different instances of
    timezones. For example
    `<DstTzInfo 'Europe/Paris' LMT+0:09:00 STD>` and
    `<DstTzInfo 'Europe/Paris' CET+1:00:00 STD>` are essentially same
    timezones but aren't evaluated such, but the string representation
    for both of these is `'Europe/Paris'`.

    This exists only to add a notion of equality to pytz-style zones
    that is compatible with the notion of equality expected of tzinfo
    subclasses.

    Parameters
    ----------
    start : tzinfo
    end : tzinfo

    Returns
    -------
    bool
    """
    # GH 18523
    if is_utc(start):
        # GH#38851 consider pytz/dateutil/stdlib UTCs as equivalent
        return is_utc(end)
    elif is_utc(end):
        # Ensure we don't treat tzlocal as equal to UTC when running in UTC
        return False
    elif start is None or end is None:
        # Equal only if both are None.
        return start is None and end is None
    return get_timezone(start) == get_timezone(end)
def tz_standardize(tz: tzinfo) -> tzinfo:
    """
    Canonicalize a pytz timezone object.

    pytz's ``normalize`` can hand back different instances of the same zone;
    rebuilding from the zone name collapses them to one canonical object.
    Non-pytz tzinfo objects are returned unchanged.

    Parameters
    ----------
    tz : tzinfo

    Returns
    -------
    tzinfo

    Examples
    --------
    >>> from datetime import datetime
    >>> from pytz import timezone
    >>> tz = timezone('US/Pacific').normalize(
    ...     datetime(2014, 1, 1, tzinfo=pytz.utc)
    ... ).tzinfo
    >>> tz
    <DstTzInfo 'US/Pacific' PST-1 day, 16:00:00 STD>
    >>> tz_standardize(tz)
    <DstTzInfo 'US/Pacific' LMT-1 day, 16:07:00 STD>

    >>> tz = timezone('US/Pacific')
    >>> tz
    <DstTzInfo 'US/Pacific' LMT-1 day, 16:07:00 STD>
    >>> tz_standardize(tz)
    <DstTzInfo 'US/Pacific' LMT-1 day, 16:07:00 STD>
    """
    if not treat_tz_as_pytz(tz):
        return tz
    return pytz.timezone(str(tz))

View File

@@ -0,0 +1,39 @@
from cpython.datetime cimport tzinfo
from numpy cimport (
int64_t,
intp_t,
ndarray,
)
from pandas._libs.tslibs.np_datetime cimport NPY_DATETIMEUNIT
# Single-value UTC->local conversion; `except? -1` lets C callers detect errors.
cpdef int64_t tz_convert_from_utc_single(
    int64_t utc_val, tzinfo tz, NPY_DATETIMEUNIT creso=*
) except? -1

# Single-value wall-time->UTC localization with ambiguity/nonexistent policy.
cdef int64_t tz_localize_to_utc_single(
    int64_t val,
    tzinfo tz,
    object ambiguous=*,
    object nonexistent=*,
    NPY_DATETIMEUNIT creso=*,
) except? -1

# Precomputed per-timezone conversion state; see tzconversion.pyx for bodies.
cdef class Localizer:
    cdef:
        tzinfo tz
        NPY_DATETIMEUNIT _creso  # resolution the trans/deltas are scaled to
        bint use_utc, use_fixed, use_tzlocal, use_dst, use_pytz
        ndarray trans
        Py_ssize_t ntrans
        const int64_t[::1] deltas
        int64_t delta
        int64_t* tdata  # raw pointer into trans for C-speed bisection

    cdef int64_t utc_val_to_local_val(
        self,
        int64_t utc_val,
        Py_ssize_t* pos,
        bint* fold=?,
    ) except? -1

View File

@@ -0,0 +1,21 @@
from datetime import (
timedelta,
tzinfo,
)
from typing import Iterable
import numpy as np
from pandas._typing import npt
# tz_convert_from_utc_single exposed for testing
def tz_convert_from_utc_single(
    val: np.int64, tz: tzinfo, creso: int = ...
) -> np.int64: ...

# Localize naive i8 wall times to UTC; `ambiguous`/`nonexistent` select the
# DST-gap/overlap policy and `creso` is an NPY_DATETIMEUNIT code.
def tz_localize_to_utc(
    vals: npt.NDArray[np.int64],
    tz: tzinfo | None,
    ambiguous: str | bool | Iterable[bool] | None = ...,
    nonexistent: str | timedelta | np.timedelta64 | None = ...,
    creso: int = ...,  # NPY_DATETIMEUNIT
) -> npt.NDArray[np.int64]: ...

View File

@@ -0,0 +1,822 @@
"""
timezone conversion
"""
cimport cython
from cpython.datetime cimport (
PyDelta_Check,
datetime,
datetime_new,
import_datetime,
timedelta,
tzinfo,
)
from cython cimport Py_ssize_t
import_datetime()
import numpy as np
import pytz
cimport numpy as cnp
from numpy cimport (
int64_t,
intp_t,
ndarray,
uint8_t,
)
cnp.import_array()
from pandas._libs.tslibs.dtypes cimport (
periods_per_day,
periods_per_second,
)
from pandas._libs.tslibs.nattype cimport NPY_NAT
from pandas._libs.tslibs.np_datetime cimport (
NPY_DATETIMEUNIT,
import_pandas_datetime,
npy_datetimestruct,
pandas_datetime_to_datetimestruct,
pydatetime_to_dt64,
)
import_pandas_datetime()
from pandas._libs.tslibs.timezones cimport (
get_dst_info,
is_fixed_offset,
is_tzlocal,
is_utc,
is_zoneinfo,
utc_stdlib,
)
# Shared empty memoryview used to initialize Localizer.deltas before/unless
# a real transition table is loaded.
cdef const int64_t[::1] _deltas_placeholder = np.array([], dtype=np.int64)
@cython.freelist(16)
@cython.final
cdef class Localizer:
    """
    Precomputes everything needed to convert UTC i8 values to local time for
    one timezone at one resolution, selecting among four strategies:
    use_utc (identity), use_tzlocal (per-value tzinfo API), use_fixed
    (single constant offset), or use_dst (bisect the transition table).
    """
    # cdef:
    #    tzinfo tz
    #    NPY_DATETIMEUNIT _creso
    #    bint use_utc, use_fixed, use_tzlocal, use_dst, use_pytz
    #    ndarray trans
    #    Py_ssize_t ntrans
    #    const int64_t[::1] deltas
    #    int64_t delta
    #    int64_t* tdata

    @cython.initializedcheck(False)
    @cython.boundscheck(False)
    def __cinit__(self, tzinfo tz, NPY_DATETIMEUNIT creso):
        self.tz = tz
        self._creso = creso
        self.use_utc = self.use_tzlocal = self.use_fixed = False
        self.use_dst = self.use_pytz = False
        self.ntrans = -1  # placeholder
        self.delta = -1  # placeholder
        self.deltas = _deltas_placeholder
        self.tdata = NULL

        if is_utc(tz) or tz is None:
            self.use_utc = True

        elif is_tzlocal(tz) or is_zoneinfo(tz):
            self.use_tzlocal = True

        else:
            trans, deltas, typ = get_dst_info(tz)
            if creso != NPY_DATETIMEUNIT.NPY_FR_ns:
                # NB: using floordiv here is implicitly assuming we will
                #  never see trans or deltas that are not an integer number
                #  of seconds.
                # TODO: avoid these np.array calls
                if creso == NPY_DATETIMEUNIT.NPY_FR_us:
                    trans = np.array(trans) // 1_000
                    deltas = np.array(deltas) // 1_000
                elif creso == NPY_DATETIMEUNIT.NPY_FR_ms:
                    trans = np.array(trans) // 1_000_000
                    deltas = np.array(deltas) // 1_000_000
                elif creso == NPY_DATETIMEUNIT.NPY_FR_s:
                    trans = np.array(trans) // 1_000_000_000
                    deltas = np.array(deltas) // 1_000_000_000
                else:
                    raise NotImplementedError(creso)

            self.trans = trans
            self.ntrans = self.trans.shape[0]
            self.deltas = deltas

            if typ != "pytz" and typ != "dateutil":
                # static/fixed; in this case we know that len(delta) == 1
                self.use_fixed = True
                self.delta = deltas[0]
            else:
                self.use_dst = True
                if typ == "pytz":
                    self.use_pytz = True
            # Raw data pointer for C-level bisection in utc_val_to_local_val.
            self.tdata = <int64_t*>cnp.PyArray_DATA(trans)

    @cython.boundscheck(False)
    cdef int64_t utc_val_to_local_val(
        self, int64_t utc_val, Py_ssize_t* pos, bint* fold=NULL
    ) except? -1:
        # Convert one UTC i8 value to local time; writes the transition index
        # through `pos` (DST path only) and optionally the fold flag.
        if self.use_utc:
            return utc_val
        elif self.use_tzlocal:
            return utc_val + _tz_localize_using_tzinfo_api(
                utc_val, self.tz, to_utc=False, creso=self._creso, fold=fold
            )
        elif self.use_fixed:
            return utc_val + self.delta
        else:
            # Find the last transition at or before utc_val.
            pos[0] = bisect_right_i8(self.tdata, utc_val, self.ntrans) - 1
            if fold is not NULL:
                fold[0] = _infer_dateutil_fold(
                    utc_val, self.trans, self.deltas, pos[0]
                )

            return utc_val + self.deltas[pos[0]]
cdef int64_t tz_localize_to_utc_single(
    int64_t val,
    tzinfo tz,
    object ambiguous=None,
    object nonexistent=None,
    NPY_DATETIMEUNIT creso=NPY_DATETIMEUNIT.NPY_FR_ns,
) except? -1:
    """See tz_localize_to_utc.__doc__"""
    cdef:
        int64_t delta
        int64_t[::1] deltas

    if val == NPY_NAT:
        # NaT passes through unchanged.
        return val

    elif is_utc(tz) or tz is None:
        return val

    elif is_tzlocal(tz):
        return val - _tz_localize_using_tzinfo_api(val, tz, to_utc=True, creso=creso)

    elif is_fixed_offset(tz):
        _, deltas, _ = get_dst_info(tz)
        delta = deltas[0]
        # TODO: de-duplicate with Localizer.__init__
        # Rescale the nanosecond delta from get_dst_info to `creso`.
        if creso != NPY_DATETIMEUNIT.NPY_FR_ns:
            if creso == NPY_DATETIMEUNIT.NPY_FR_us:
                delta = delta // 1000
            elif creso == NPY_DATETIMEUNIT.NPY_FR_ms:
                delta = delta // 1_000_000
            elif creso == NPY_DATETIMEUNIT.NPY_FR_s:
                delta = delta // 1_000_000_000

        return val - delta

    else:
        # General case: delegate to the vectorized implementation on a
        # 1-element array.
        return tz_localize_to_utc(
            np.array([val], dtype="i8"),
            tz,
            ambiguous=ambiguous,
            nonexistent=nonexistent,
            creso=creso,
        )[0]
@cython.boundscheck(False)
@cython.wraparound(False)
def tz_localize_to_utc(
ndarray[int64_t] vals,
tzinfo tz,
object ambiguous=None,
object nonexistent=None,
NPY_DATETIMEUNIT creso=NPY_DATETIMEUNIT.NPY_FR_ns,
):
"""
Localize tzinfo-naive i8 to given time zone (using pytz). If
there are ambiguities in the values, raise AmbiguousTimeError.
Parameters
----------
vals : ndarray[int64_t]
tz : tzinfo or None
ambiguous : str, bool, or arraylike
When clocks moved backward due to DST, ambiguous times may arise.
For example in Central European Time (UTC+01), when going from 03:00
DST to 02:00 non-DST, 02:30:00 local time occurs both at 00:30:00 UTC
and at 01:30:00 UTC. In such a situation, the `ambiguous` parameter
dictates how ambiguous times should be handled.
- 'infer' will attempt to infer fall dst-transition hours based on
order
- bool-ndarray where True signifies a DST time, False signifies a
non-DST time (note that this flag is only applicable for ambiguous
times, but the array must have the same length as vals)
- bool if True, treat all vals as DST. If False, treat them as non-DST
- 'NaT' will return NaT where there are ambiguous times
nonexistent : {None, "NaT", "shift_forward", "shift_backward", "raise", \
timedelta-like}
How to handle non-existent times when converting wall times to UTC
creso : NPY_DATETIMEUNIT, default NPY_FR_ns
Returns
-------
localized : ndarray[int64_t]
"""
if tz is None or is_utc(tz) or vals.size == 0:
# Fastpath, avoid overhead of creating Localizer
return vals.copy()
cdef:
ndarray[uint8_t, cast=True] ambiguous_array
Py_ssize_t i, n = vals.shape[0]
Py_ssize_t delta_idx_offset, delta_idx
int64_t v, left, right, val, new_local, remaining_mins
int64_t first_delta, delta
int64_t shift_delta = 0
ndarray[int64_t] result_a, result_b, dst_hours
int64_t[::1] result
bint is_zi = False
bint infer_dst = False, is_dst = False, fill = False
bint shift_forward = False, shift_backward = False
bint fill_nonexist = False
str stamp
Localizer info = Localizer(tz, creso=creso)
int64_t pph = periods_per_day(creso) // 24
int64_t pps = periods_per_second(creso)
npy_datetimestruct dts
# Vectorized version of DstTzInfo.localize
# silence false-positive compiler warning
ambiguous_array = np.empty(0, dtype=bool)
if isinstance(ambiguous, str):
if ambiguous == "infer":
infer_dst = True
elif ambiguous == "NaT":
fill = True
elif isinstance(ambiguous, bool):
is_dst = True
if ambiguous:
ambiguous_array = np.ones(len(vals), dtype=bool)
else:
ambiguous_array = np.zeros(len(vals), dtype=bool)
elif hasattr(ambiguous, "__iter__"):
is_dst = True
if len(ambiguous) != len(vals):
raise ValueError("Length of ambiguous bool-array must be "
"the same size as vals")
ambiguous_array = np.asarray(ambiguous, dtype=bool)
if nonexistent == "NaT":
fill_nonexist = True
elif nonexistent == "shift_forward":
shift_forward = True
elif nonexistent == "shift_backward":
shift_backward = True
elif PyDelta_Check(nonexistent):
from .timedeltas import delta_to_nanoseconds
shift_delta = delta_to_nanoseconds(nonexistent, reso=creso)
elif nonexistent not in ("raise", None):
msg = ("nonexistent must be one of {'NaT', 'raise', 'shift_forward', "
"shift_backwards} or a timedelta object")
raise ValueError(msg)
result = cnp.PyArray_EMPTY(vals.ndim, vals.shape, cnp.NPY_INT64, 0)
if info.use_tzlocal and not is_zoneinfo(tz):
for i in range(n):
v = vals[i]
if v == NPY_NAT:
result[i] = NPY_NAT
else:
result[i] = v - _tz_localize_using_tzinfo_api(
v, tz, to_utc=True, creso=creso
)
return result.base # to return underlying ndarray
elif info.use_fixed:
delta = info.delta
for i in range(n):
v = vals[i]
if v == NPY_NAT:
result[i] = NPY_NAT
else:
result[i] = v - delta
return result.base # to return underlying ndarray
# Determine whether each date lies left of the DST transition (store in
# result_a) or right of the DST transition (store in result_b)
if is_zoneinfo(tz):
is_zi = True
result_a, result_b =_get_utc_bounds_zoneinfo(
vals, tz, creso=creso
)
else:
result_a, result_b =_get_utc_bounds(
vals, info.tdata, info.ntrans, info.deltas, creso=creso
)
# silence false-positive compiler warning
dst_hours = np.empty(0, dtype=np.int64)
if infer_dst:
dst_hours = _get_dst_hours(vals, result_a, result_b, creso=creso)
# Pre-compute delta_idx_offset that will be used if we go down non-existent
# paths.
# Shift the delta_idx by if the UTC offset of
# the target tz is greater than 0 and we're moving forward
# or vice versa
first_delta = info.deltas[0]
if (shift_forward or shift_delta > 0) and first_delta > 0:
delta_idx_offset = 1
elif (shift_backward or shift_delta < 0) and first_delta < 0:
delta_idx_offset = 1
else:
delta_idx_offset = 0
for i in range(n):
val = vals[i]
left = result_a[i]
right = result_b[i]
if val == NPY_NAT:
# TODO: test with non-nano
result[i] = val
elif left != NPY_NAT and right != NPY_NAT:
if left == right:
# TODO: test with non-nano
result[i] = left
else:
if infer_dst and dst_hours[i] != NPY_NAT:
# TODO: test with non-nano
result[i] = dst_hours[i]
elif is_dst:
if ambiguous_array[i]:
result[i] = left
else:
result[i] = right
elif fill:
# TODO: test with non-nano; parametrize test_dt_round_tz_ambiguous
result[i] = NPY_NAT
else:
stamp = _render_tstamp(val, creso=creso)
raise pytz.AmbiguousTimeError(
f"Cannot infer dst time from {stamp}, try using the "
"'ambiguous' argument"
)
elif left != NPY_NAT:
result[i] = left
elif right != NPY_NAT:
# TODO: test with non-nano
result[i] = right
else:
# Handle nonexistent times
if shift_forward or shift_backward or shift_delta != 0:
# Shift the nonexistent time to the closest existing time
remaining_mins = val % pph
if shift_delta != 0:
# Validate that we don't relocalize on another nonexistent
# time
if -1 < shift_delta + remaining_mins < pph:
raise ValueError(
"The provided timedelta will relocalize on a "
f"nonexistent time: {nonexistent}"
)
new_local = val + shift_delta
elif shift_forward:
new_local = val + (pph - remaining_mins)
else:
# Subtract 1 since the beginning hour is _inclusive_ of
# nonexistent times
new_local = val - remaining_mins - 1
if is_zi:
# use the same construction as in _get_utc_bounds_zoneinfo
pandas_datetime_to_datetimestruct(new_local, creso, &dts)
extra = (dts.ps // 1000) * (pps // 1_000_000_000)
dt = datetime_new(dts.year, dts.month, dts.day, dts.hour,
dts.min, dts.sec, dts.us, None)
if shift_forward or shift_delta > 0:
dt = dt.replace(tzinfo=tz, fold=1)
else:
dt = dt.replace(tzinfo=tz, fold=0)
dt = dt.astimezone(utc_stdlib)
dt = dt.replace(tzinfo=None)
result[i] = pydatetime_to_dt64(dt, &dts, creso) + extra
else:
delta_idx = bisect_right_i8(info.tdata, new_local, info.ntrans)
delta_idx = delta_idx - delta_idx_offset
result[i] = new_local - info.deltas[delta_idx]
elif fill_nonexist:
result[i] = NPY_NAT
else:
stamp = _render_tstamp(val, creso=creso)
raise pytz.NonExistentTimeError(stamp)
return result.base # .base to get underlying ndarray
cdef Py_ssize_t bisect_right_i8(int64_t *data, int64_t val, Py_ssize_t n):
    """
    Bisect-right on a raw, sorted int64 buffer: return the index at which
    `val` would be inserted to keep `data` sorted, with entries equal to
    `val` placed to the left of the insertion point.
    """
    # Caller is responsible for checking n > 0
    # This looks very similar to local_search_right in the ndarray.searchsorted
    # implementation.
    cdef:
        Py_ssize_t pivot, left = 0, right = n

    # edge case: val beyond the last entry -> insertion point is the end
    if val > data[n - 1]:
        return n

    # Caller is responsible for ensuring 'val >= data[0]'. This is
    # ensured by the fact that 'data' comes from get_dst_info where data[0]
    # is *always* NPY_NAT+1. If that ever changes, we will need to restore
    # the following disabled check.
    # if val < data[0]:
    #     return 0

    # Standard binary search; ties (data[pivot] == val) move the left bound,
    # which yields the right-biased insertion point.
    while left < right:
        pivot = left + (right - left) // 2

        if data[pivot] <= val:
            left = pivot + 1
        else:
            right = pivot

    return left
cdef str _render_tstamp(int64_t val, NPY_DATETIMEUNIT creso):
    """Render the i8 value `val` (at resolution `creso`) as a timestamp
    string for use in exception messages."""
    # Local import to avoid a circular import at module load time.
    from pandas._libs.tslibs.timestamps import Timestamp
    ts = Timestamp._from_value_and_reso(val, creso, None)
    return str(ts)
cdef _get_utc_bounds(
    ndarray vals,
    int64_t* tdata,
    Py_ssize_t ntrans,
    const int64_t[::1] deltas,
    NPY_DATETIMEUNIT creso,
):
    """
    For each i8 value in `vals`, compute the candidate UTC value obtained by
    assuming it lies on the left side of a DST transition (stored in
    `result_a`) or on the right side (stored in `result_b`).  Entries that do
    not resolve consistently on a given side are left as NPY_NAT.

    Parameters
    ----------
    vals : ndarray[int64_t]
    tdata : int64_t*
        Transition points (i8), length `ntrans`.
    ntrans : Py_ssize_t
    deltas : const int64_t[::1]
        UTC offsets corresponding to the transition points.
    creso : NPY_DATETIMEUNIT

    Returns
    -------
    result_a : ndarray[int64_t]
    result_b : ndarray[int64_t]
    """
    # Determine whether each date lies left of the DST transition (store in
    # result_a) or right of the DST transition (store in result_b)
    cdef:
        ndarray result_a, result_b
        Py_ssize_t i, n = vals.size
        int64_t val, v_left, v_right
        Py_ssize_t isl, isr, pos_left, pos_right
        int64_t ppd = periods_per_day(creso)

    result_a = cnp.PyArray_EMPTY(vals.ndim, vals.shape, cnp.NPY_INT64, 0)
    result_b = cnp.PyArray_EMPTY(vals.ndim, vals.shape, cnp.NPY_INT64, 0)

    for i in range(n):
        # This loops resembles the "Find the two best possibilities" block
        # in pytz's DstTZInfo.localize method.
        result_a[i] = NPY_NAT
        result_b[i] = NPY_NAT

        val = vals[i]
        if val == NPY_NAT:
            continue

        # TODO: be careful of overflow in val-ppd
        # Use the offset in effect one day *earlier* as the left-side candidate.
        isl = bisect_right_i8(tdata, val - ppd, ntrans) - 1
        if isl < 0:
            isl = 0

        v_left = val - deltas[isl]
        pos_left = bisect_right_i8(tdata, v_left, ntrans) - 1
        # timestamp falls to the left side of the DST transition
        # (round-trip through the offset at pos_left recovers val)
        if v_left + deltas[pos_left] == val:
            result_a[i] = v_left

        # TODO: be careful of overflow in val+ppd
        # Use the offset in effect one day *later* as the right-side candidate.
        isr = bisect_right_i8(tdata, val + ppd, ntrans) - 1
        if isr < 0:
            isr = 0

        v_right = val - deltas[isr]
        pos_right = bisect_right_i8(tdata, v_right, ntrans) - 1
        # timestamp falls to the right side of the DST transition
        if v_right + deltas[pos_right] == val:
            result_b[i] = v_right

    return result_a, result_b
cdef _get_utc_bounds_zoneinfo(ndarray vals, tz, NPY_DATETIMEUNIT creso):
    """
    For each point in 'vals', find the UTC time that it corresponds to
    with fold=0 and with fold=1. In non-ambiguous cases, these will match.

    Parameters
    ----------
    vals : ndarray[int64_t]
    tz : ZoneInfo
    creso : NPY_DATETIMEUNIT

    Returns
    -------
    ndarray[int64_t]
        UTC values assuming fold=0; NPY_NAT where non-existent.
    ndarray[int64_t]
        UTC values assuming fold=1; NPY_NAT where non-existent.
    """
    cdef:
        Py_ssize_t i, n = vals.size
        npy_datetimestruct dts
        datetime dt, rt, left, right, aware, as_utc
        int64_t val, pps = periods_per_second(creso)
        ndarray result_a, result_b

    result_a = cnp.PyArray_EMPTY(vals.ndim, vals.shape, cnp.NPY_INT64, 0)
    result_b = cnp.PyArray_EMPTY(vals.ndim, vals.shape, cnp.NPY_INT64, 0)

    for i in range(n):
        val = vals[i]
        if val == NPY_NAT:
            result_a[i] = NPY_NAT
            result_b[i] = NPY_NAT
            continue

        pandas_datetime_to_datetimestruct(val, creso, &dts)

        # casting to pydatetime drops nanoseconds etc, which we will
        # need to re-add later as 'extra'
        extra = (dts.ps // 1000) * (pps // 1_000_000_000)

        dt = datetime_new(dts.year, dts.month, dts.day, dts.hour,
                          dts.min, dts.sec, dts.us, None)

        # Candidate assuming fold=0 (the default on a fresh datetime).
        aware = dt.replace(tzinfo=tz)
        as_utc = aware.astimezone(utc_stdlib)
        rt = as_utc.astimezone(tz)
        if aware != rt:
            # AFAICT this means that 'aware' is non-existent
            # TODO: better way to check this?
            # mail.python.org/archives/list/datetime-sig@python.org/
            # thread/57Y3IQAASJOKHX4D27W463XTZIS2NR3M/
            result_a[i] = NPY_NAT
        else:
            left = as_utc.replace(tzinfo=None)
            result_a[i] = pydatetime_to_dt64(left, &dts, creso) + extra

        # Candidate assuming fold=1.
        aware = dt.replace(fold=1, tzinfo=tz)
        as_utc = aware.astimezone(utc_stdlib)
        rt = as_utc.astimezone(tz)
        if aware != rt:
            result_b[i] = NPY_NAT
        else:
            right = as_utc.replace(tzinfo=None)
            result_b[i] = pydatetime_to_dt64(right, &dts, creso) + extra

    return result_a, result_b
@cython.boundscheck(False)
cdef ndarray[int64_t] _get_dst_hours(
    # vals, creso only needed here to potential render an exception message
    const int64_t[:] vals,
    ndarray[int64_t] result_a,
    ndarray[int64_t] result_b,
    NPY_DATETIMEUNIT creso,
):
    """
    For ambiguous entries (where result_a != result_b and neither is NaT),
    infer which candidate applies by locating the single DST switch inside
    each contiguous run of ambiguous indices: entries before the switch take
    result_a, entries at/after it take result_b.  All other entries of the
    returned array are NPY_NAT.

    Raises
    ------
    pytz.AmbiguousTimeError
        If the switch cannot be inferred (an isolated ambiguous hour, no
        repeated hour within a run, or more than one apparent switch).
    """
    cdef:
        Py_ssize_t i, n = vals.shape[0]
        ndarray[uint8_t, cast=True] mismatch
        ndarray[int64_t] delta, dst_hours
        ndarray[intp_t] switch_idxs, trans_idx, grp, a_idx, b_idx, one_diff
        list trans_grp
        intp_t switch_idx
        int64_t left, right

    dst_hours = cnp.PyArray_EMPTY(result_a.ndim, result_a.shape, cnp.NPY_INT64, 0)
    dst_hours[:] = NPY_NAT

    mismatch = cnp.PyArray_ZEROS(result_a.ndim, result_a.shape, cnp.NPY_BOOL, 0)

    for i in range(n):
        left = result_a[i]
        right = result_b[i]

        # Get the ambiguous hours (given the above, these are the hours
        # where result_a != result_b and neither of them are NAT)
        if left != right and left != NPY_NAT and right != NPY_NAT:
            mismatch[i] = 1

    trans_idx = mismatch.nonzero()[0]

    if trans_idx.size == 1:
        # see test_tz_localize_to_utc_ambiguous_infer
        stamp = _render_tstamp(vals[trans_idx[0]], creso=creso)
        raise pytz.AmbiguousTimeError(
            f"Cannot infer dst time from {stamp} as there "
            "are no repeated times"
        )

    # Split the array into contiguous chunks (where the difference between
    # indices is 1). These are effectively dst transitions in different
    # years which is useful for checking that there is not an ambiguous
    # transition in an individual year.
    if trans_idx.size > 0:
        one_diff = np.where(np.diff(trans_idx) != 1)[0] + 1
        trans_grp = np.array_split(trans_idx, one_diff)

        # Iterate through each day, if there are no hours where the
        # delta is negative (indicates a repeat of hour) the switch
        # cannot be inferred
        for grp in trans_grp:
            delta = np.diff(result_a[grp])
            if grp.size == 1 or np.all(delta > 0):
                # see test_tz_localize_to_utc_ambiguous_infer
                stamp = _render_tstamp(vals[grp[0]], creso=creso)
                raise pytz.AmbiguousTimeError(stamp)

            # Find the index for the switch and pull from a for dst and b
            # for standard
            switch_idxs = (delta <= 0).nonzero()[0]
            if switch_idxs.size > 1:
                # see test_tz_localize_to_utc_ambiguous_infer
                raise pytz.AmbiguousTimeError(
                    f"There are {switch_idxs.size} dst switches when "
                    "there should only be 1."
                )

            # +1 so the repeated hour itself falls in the b (standard) part.
            switch_idx = switch_idxs[0] + 1
            # Pull the only index and adjust
            a_idx = grp[:switch_idx]
            b_idx = grp[switch_idx:]

            dst_hours[grp] = np.hstack((result_a[a_idx], result_b[b_idx]))

    return dst_hours
# ----------------------------------------------------------------------
# Timezone Conversion
cpdef int64_t tz_convert_from_utc_single(
    int64_t utc_val, tzinfo tz, NPY_DATETIMEUNIT creso=NPY_DATETIMEUNIT.NPY_FR_ns
) except? -1:
    """
    Convert the val (in i8) from UTC to tz

    This is a single value version of tz_convert_from_utc.

    Parameters
    ----------
    utc_val : int64
    tz : tzinfo
    creso : NPY_DATETIMEUNIT, default NPY_FR_ns

    Returns
    -------
    converted: int64
    """
    cdef:
        Localizer info = Localizer(tz, creso=creso)
        Py_ssize_t pos  # out-param written by utc_val_to_local_val; unused here

    # Note: caller is responsible for ensuring utc_val != NPY_NAT
    return info.utc_val_to_local_val(utc_val, &pos)
# OSError may be thrown by tzlocal on windows at or close to 1970-01-01
# see https://github.com/pandas-dev/pandas/pull/37591#issuecomment-720628241
cdef int64_t _tz_localize_using_tzinfo_api(
    int64_t val,
    tzinfo tz,
    bint to_utc=True,
    NPY_DATETIMEUNIT creso=NPY_DATETIMEUNIT.NPY_FR_ns,
    bint* fold=NULL,
) except? -1:
    """
    Convert the i8 representation of a datetime from a general-case timezone to
    UTC, or vice-versa using the datetime/tzinfo API.

    Private, not intended for use outside of tslibs.tzconversion.

    Parameters
    ----------
    val : int64_t
    tz : tzinfo
    to_utc : bint
        True if converting _to_ UTC, False if going the other direction.
    creso : NPY_DATETIMEUNIT
    fold : bint*, default NULL
        pointer to fold: whether datetime ends up in a fold or not
        after adjustment.
        Only passed with to_utc=False.

    Returns
    -------
    delta : int64_t
        Value to add when converting from utc, subtract when converting to utc.

    Notes
    -----
    Sets fold by pointer
    """
    cdef:
        npy_datetimestruct dts
        datetime dt
        int64_t delta
        timedelta td
        int64_t pps = periods_per_second(creso)

    pandas_datetime_to_datetimestruct(val, creso, &dts)

    # datetime_new is cython-optimized constructor
    if not to_utc:
        # tz.utcoffset only makes sense if datetime
        # is _wall time_, so if val is a UTC timestamp convert to wall time
        dt = _astimezone(dts, tz)

        if fold is not NULL:
            # NB: fold is only passed with to_utc=False
            fold[0] = dt.fold
    else:
        dt = datetime_new(dts.year, dts.month, dts.day, dts.hour,
                          dts.min, dts.sec, dts.us, None)

    # Scale the offset (seconds as float) up to the requested resolution.
    td = tz.utcoffset(dt)
    delta = int(td.total_seconds() * pps)
    return delta
cdef datetime _astimezone(npy_datetimestruct dts, tzinfo tz):
    """
    Optimized equivalent to:

        dt = datetime(dts.year, dts.month, dts.day, dts.hour,
                      dts.min, dts.sec, dts.us, utc_stdlib)
        dt = dt.astimezone(tz)

    Derived from the datetime.astimezone implementation at
    https://github.com/python/cpython/blob/main/Modules/_datetimemodule.c#L6187

    NB: we are assuming tz is not None.
    """
    cdef:
        datetime result

    # Construct the datetime directly with tzinfo=tz and let tz.fromutc
    # adjust the wall time, skipping the intermediate UTC-aware object.
    result = datetime_new(dts.year, dts.month, dts.day, dts.hour,
                          dts.min, dts.sec, dts.us, tz)
    return tz.fromutc(result)
# NB: relies on dateutil internals, subject to change.
@cython.boundscheck(False)
@cython.wraparound(False)
cdef bint _infer_dateutil_fold(
    int64_t value,
    const int64_t[::1] trans,
    const int64_t[::1] deltas,
    Py_ssize_t pos,
):
    """
    Infer _TSObject fold property from value by assuming 0 and then setting
    to 1 if necessary.

    Parameters
    ----------
    value : int64_t
    trans : ndarray[int64_t]
        ndarray of offset transition points in nanoseconds since epoch.
    deltas : int64_t[:]
        array of offsets corresponding to transition points in trans.
    pos : Py_ssize_t
        Position of the last transition point before taking fold into account.

    Returns
    -------
    bint
        Due to daylight saving time, one wall clock time can occur twice
        when shifting from summer to winter time; fold describes whether the
        datetime-like corresponds to the first (0) or the second time (1)
        the wall clock hits the ambiguous time

    References
    ----------
    .. [1] "PEP 495 - Local Time Disambiguation"
           https://www.python.org/dev/peps/pep-0495/#the-fold-attribute
    """
    cdef:
        bint fold = 0
        int64_t fold_delta

    if pos > 0:
        # If shifting value back by the offset change at this transition
        # lands before the transition itself, the wall time is a repeat
        # of an earlier hour -> second occurrence -> fold=1.
        fold_delta = deltas[pos - 1] - deltas[pos]
        if value - fold_delta < trans[pos]:
            fold = 1

    return fold

View File

@@ -0,0 +1,227 @@
from cpython.object cimport PyTypeObject
cdef extern from "Python.h":
# Note: importing extern-style allows us to declare these as nogil
# functions, whereas `from cpython cimport` does not.
bint PyBool_Check(object obj) nogil
bint PyFloat_Check(object obj) nogil
bint PyComplex_Check(object obj) nogil
bint PyObject_TypeCheck(object obj, PyTypeObject* type) nogil
# TODO(cython3): cimport this, xref GH#49670
# Note that following functions can potentially raise an exception,
# thus they cannot be declared 'nogil'. Also PyUnicode_AsUTF8AndSize() can
# potentially allocate memory inside in unlikely case of when underlying
# unicode object was stored as non-utf8 and utf8 wasn't requested before.
const char* PyUnicode_AsUTF8AndSize(object obj,
Py_ssize_t* length) except NULL
object PyUnicode_EncodeLocale(object obj, const char *errors) nogil
object PyUnicode_DecodeLocale(const char *str, const char *errors) nogil
from numpy cimport (
float64_t,
int64_t,
)
cdef extern from "numpy/arrayobject.h":
PyTypeObject PyFloatingArrType_Type
cdef extern from "numpy/ndarrayobject.h":
PyTypeObject PyTimedeltaArrType_Type
PyTypeObject PyDatetimeArrType_Type
PyTypeObject PyComplexFloatingArrType_Type
PyTypeObject PyBoolArrType_Type
bint PyArray_IsIntegerScalar(obj) nogil
bint PyArray_Check(obj) nogil
cdef extern from "numpy/npy_common.h":
int64_t NPY_MIN_INT64
cdef inline int64_t get_nat() noexcept:
    # i8 representation of NaT: the minimum int64 value.
    return NPY_MIN_INT64
# --------------------------------------------------------------------
# Type Checking
cdef inline bint is_integer_object(object obj) noexcept nogil:
    """
    Cython equivalent of
    `isinstance(val, (int, long, np.integer)) and not isinstance(val, bool)`

    Parameters
    ----------
    val : object

    Returns
    -------
    is_integer : bool

    Notes
    -----
    This counts np.timedelta64 objects as integers.
    """
    # PyArray_IsIntegerScalar also matches np.timedelta64, so exclude it
    # explicitly; bools are excluded since bool subclasses int.
    return (not PyBool_Check(obj) and PyArray_IsIntegerScalar(obj)
            and not is_timedelta64_object(obj))
cdef inline bint is_float_object(object obj) noexcept nogil:
    """
    Cython equivalent of `isinstance(val, (float, np.float64))`

    Parameters
    ----------
    val : object

    Returns
    -------
    is_float : bool
    """
    # Matches python float or any numpy floating scalar type.
    return (PyFloat_Check(obj) or
            (PyObject_TypeCheck(obj, &PyFloatingArrType_Type)))
cdef inline bint is_complex_object(object obj) noexcept nogil:
    """
    Cython equivalent of `isinstance(val, (complex, np.complex128))`

    Parameters
    ----------
    val : object

    Returns
    -------
    is_complex : bool
    """
    # Matches python complex or any numpy complex-floating scalar type.
    return (PyComplex_Check(obj) or
            PyObject_TypeCheck(obj, &PyComplexFloatingArrType_Type))
cdef inline bint is_bool_object(object obj) noexcept nogil:
    """
    Cython equivalent of `isinstance(val, (bool, np.bool_))`

    Parameters
    ----------
    val : object

    Returns
    -------
    is_bool : bool
    """
    # Matches python bool or the numpy bool scalar type.
    return (PyBool_Check(obj) or
            PyObject_TypeCheck(obj, &PyBoolArrType_Type))
cdef inline bint is_real_number_object(object obj) noexcept nogil:
    # True for bool, integer, or float objects (python or numpy scalars).
    return is_bool_object(obj) or is_integer_object(obj) or is_float_object(obj)
cdef inline bint is_timedelta64_object(object obj) noexcept nogil:
    """
    Cython equivalent of `isinstance(val, np.timedelta64)`

    Parameters
    ----------
    val : object

    Returns
    -------
    is_timedelta64 : bool
    """
    # Direct type check against the numpy timedelta64 scalar type.
    return PyObject_TypeCheck(obj, &PyTimedeltaArrType_Type)
cdef inline bint is_datetime64_object(object obj) noexcept nogil:
    """
    Cython equivalent of `isinstance(val, np.datetime64)`

    Parameters
    ----------
    val : object

    Returns
    -------
    is_datetime64 : bool
    """
    # Direct type check against the numpy datetime64 scalar type.
    return PyObject_TypeCheck(obj, &PyDatetimeArrType_Type)
cdef inline bint is_array(object val) noexcept:
    """
    Cython equivalent of `isinstance(val, np.ndarray)`

    Parameters
    ----------
    val : object

    Returns
    -------
    is_ndarray : bool
    """
    return PyArray_Check(val)
cdef inline bint is_nan(object val):
    """
    Check if val is a Not-A-Number float or complex, including
    float('NaN') and np.nan.

    Parameters
    ----------
    val : object

    Returns
    -------
    is_nan : bool
    """
    cdef float64_t fval
    if is_float_object(val):
        fval = val
        # NaN is the only float value that is not equal to itself.
        return fval != fval
    return is_complex_object(val) and val != val
cdef inline const char* get_c_string_buf_and_size(str py_string,
                                                  Py_ssize_t* length) except NULL:
    """
    Extract internal char* buffer of unicode or bytes object `py_string` with
    getting length of this internal buffer saved in `length`.

    Notes
    -----
    Python object owns memory, thus returned char* must not be freed.
    `length` can be NULL if getting buffer length is not needed.

    Parameters
    ----------
    py_string : str
    length : Py_ssize_t*

    Returns
    -------
    buf : const char*
    """
    return PyUnicode_AsUTF8AndSize(py_string, length)
cdef inline const char* get_c_string(str py_string) except NULL:
    # Convenience wrapper for when the buffer length is not needed.
    return get_c_string_buf_and_size(py_string, NULL)
cdef inline bytes string_encode_locale(str py_string) noexcept:
    """As opposed to PyUnicode_Encode, use current system locale to encode."""
    # errors=NULL selects "strict" error handling (CPython C-API default).
    return PyUnicode_EncodeLocale(py_string, NULL)
cdef inline object char_to_string_locale(const char* data) noexcept:
    """As opposed to PyUnicode_FromString, use current system locale to decode."""
    # errors=NULL selects "strict" error handling (CPython C-API default).
    return PyUnicode_DecodeLocale(data, NULL)

View File

@@ -0,0 +1,43 @@
"""
For cython types that cannot be represented precisely, closest-available
python equivalents are used, and the precise types kept as adjacent comments.
"""
from datetime import tzinfo
import numpy as np
from pandas._libs.tslibs.dtypes import Resolution
from pandas._typing import npt
# Convert i8 datetimes to period ordinals at `freq` (impl: vectorized.pyx).
def dt64arr_to_periodarr(
    stamps: npt.NDArray[np.int64],
    freq: int,
    tz: tzinfo | None,
    reso: int = ...,  # NPY_DATETIMEUNIT
) -> npt.NDArray[np.int64]: ...
# True if every stamp is at local midnight in `tz` (impl: vectorized.pyx).
def is_date_array_normalized(
    stamps: npt.NDArray[np.int64],
    tz: tzinfo | None,
    reso: int,  # NPY_DATETIMEUNIT
) -> bool: ...
# Round each stamp down to local midnight in `tz` (impl: vectorized.pyx).
def normalize_i8_timestamps(
    stamps: npt.NDArray[np.int64],
    tz: tzinfo | None,
    reso: int,  # NPY_DATETIMEUNIT
) -> npt.NDArray[np.int64]: ...
# Finest Resolution needed to represent all stamps (impl: vectorized.pyx).
def get_resolution(
    stamps: npt.NDArray[np.int64],
    tz: tzinfo | None = ...,
    reso: int = ...,  # NPY_DATETIMEUNIT
) -> Resolution: ...
# Box i8 stamps as datetime/date/time/Timestamp objects (impl: vectorized.pyx).
def ints_to_pydatetime(
    arr: npt.NDArray[np.int64],
    tz: tzinfo | None = ...,
    box: str = ...,
    reso: int = ...,  # NPY_DATETIMEUNIT
) -> npt.NDArray[np.object_]: ...
# Convert i8 stamps from UTC to wall time in `tz` (impl: vectorized.pyx).
def tz_convert_from_utc(
    stamps: npt.NDArray[np.int64],
    tz: tzinfo | None,
    reso: int = ...,  # NPY_DATETIMEUNIT
) -> npt.NDArray[np.int64]: ...

View File

@@ -0,0 +1,381 @@
cimport cython
cimport numpy as cnp
from cpython.datetime cimport (
date,
datetime,
time,
tzinfo,
)
from numpy cimport (
int64_t,
ndarray,
)
cnp.import_array()
from .dtypes import Resolution
from .dtypes cimport (
c_Resolution,
periods_per_day,
)
from .nattype cimport (
NPY_NAT,
c_NaT as NaT,
)
from .np_datetime cimport (
NPY_DATETIMEUNIT,
NPY_FR_ns,
import_pandas_datetime,
npy_datetimestruct,
pandas_datetime_to_datetimestruct,
)
import_pandas_datetime()
from .period cimport get_period_ordinal
from .timestamps cimport create_timestamp_from_ts
from .timezones cimport is_utc
from .tzconversion cimport Localizer
@cython.boundscheck(False)
@cython.wraparound(False)
def tz_convert_from_utc(ndarray stamps, tzinfo tz, NPY_DATETIMEUNIT reso=NPY_FR_ns):
    # stamps is int64_t, arbitrary ndim
    """
    Convert the values (in i8) from UTC to tz

    Parameters
    ----------
    stamps : ndarray[int64]
    tz : tzinfo
    reso : NPY_DATETIMEUNIT, default NPY_FR_ns

    Returns
    -------
    ndarray[int64]
    """
    if tz is None or is_utc(tz) or stamps.size == 0:
        # Much faster than going through the "standard" pattern below;
        # do this before initializing Localizer.
        return stamps.copy()

    cdef:
        Localizer info = Localizer(tz, creso=reso)
        int64_t utc_val, local_val
        Py_ssize_t pos, i, n = stamps.size
        ndarray result
        cnp.broadcast mi

    result = cnp.PyArray_EMPTY(stamps.ndim, stamps.shape, cnp.NPY_INT64, 0)
    mi = cnp.PyArray_MultiIterNew2(result, stamps)

    for i in range(n):
        # Analogous to: utc_val = stamps[i]
        utc_val = (<int64_t*>cnp.PyArray_MultiIter_DATA(mi, 1))[0]

        # NaT passes through unchanged.
        if utc_val == NPY_NAT:
            local_val = NPY_NAT
        else:
            local_val = info.utc_val_to_local_val(utc_val, &pos)

        # Analogous to: result[i] = local_val
        (<int64_t*>cnp.PyArray_MultiIter_DATA(mi, 0))[0] = local_val

        cnp.PyArray_MultiIter_NEXT(mi)

    return result
# -------------------------------------------------------------------------
@cython.wraparound(False)
@cython.boundscheck(False)
def ints_to_pydatetime(
    ndarray stamps,
    tzinfo tz=None,
    str box="datetime",
    NPY_DATETIMEUNIT reso=NPY_FR_ns,
) -> ndarray:
    # stamps is int64, arbitrary ndim
    """
    Convert an i8 repr to an ndarray of datetimes, date, time or Timestamp.

    Parameters
    ----------
    stamps : array of i8
    tz : tzinfo or None, default None
        convert to this timezone
    box : {'datetime', 'timestamp', 'date', 'time'}, default 'datetime'
        * If datetime, convert to datetime.datetime
        * If date, convert to datetime.date
        * If time, convert to datetime.time
        * If timestamp, convert to pandas.Timestamp
    reso : NPY_DATETIMEUNIT, default NPY_FR_ns

    Returns
    -------
    ndarray[object] of type specified by box
    """
    cdef:
        Localizer info = Localizer(tz, creso=reso)
        int64_t utc_val, local_val
        Py_ssize_t i, n = stamps.size
        Py_ssize_t pos = -1  # unused, avoid not-initialized warning
        npy_datetimestruct dts
        tzinfo new_tz
        bint use_date = False, use_ts = False, use_pydt = False
        object res_val
        bint fold = 0

        # Note that `result` (and thus `result_flat`) is C-order and
        # `it` iterates C-order as well, so the iteration matches
        # See discussion at
        # github.com/pandas-dev/pandas/pull/46886#discussion_r860261305
        ndarray result = cnp.PyArray_EMPTY(stamps.ndim, stamps.shape, cnp.NPY_OBJECT, 0)
        object[::1] res_flat = result.ravel()  # should NOT be a copy
        cnp.flatiter it = cnp.PyArray_IterNew(stamps)

    if box == "date":
        assert (tz is None), "tz should be None when converting to date"
        use_date = True
    elif box == "timestamp":
        use_ts = True
    elif box == "datetime":
        use_pydt = True
    elif box != "time":
        raise ValueError(
            "box must be one of 'datetime', 'date', 'time' or 'timestamp'"
        )

    for i in range(n):
        # Analogous to: utc_val = stamps[i]
        utc_val = (<int64_t*>cnp.PyArray_ITER_DATA(it))[0]

        new_tz = tz

        if utc_val == NPY_NAT:
            res_val = <object>NaT
        else:
            local_val = info.utc_val_to_local_val(utc_val, &pos, &fold)
            if info.use_pytz:
                # find right representation of dst etc in pytz timezone
                new_tz = tz._tzinfos[tz._transition_info[pos]]

            pandas_datetime_to_datetimestruct(local_val, reso, &dts)

            if use_ts:
                res_val = create_timestamp_from_ts(
                    utc_val, dts, new_tz, fold, reso=reso
                )
            elif use_pydt:
                res_val = datetime(
                    dts.year, dts.month, dts.day, dts.hour, dts.min, dts.sec, dts.us,
                    new_tz, fold=fold,
                )
            elif use_date:
                res_val = date(dts.year, dts.month, dts.day)
            else:
                res_val = time(dts.hour, dts.min, dts.sec, dts.us, new_tz, fold=fold)

        # Note: we can index result directly instead of using PyArray_MultiIter_DATA
        # like we do for the other functions because result is known C-contiguous
        # and is the first argument to PyArray_MultiIterNew2. The usual pattern
        # does not seem to work with object dtype.
        # See discussion at
        # github.com/pandas-dev/pandas/pull/46886#discussion_r860261305
        res_flat[i] = res_val

        cnp.PyArray_ITER_NEXT(it)

    return result
# -------------------------------------------------------------------------
cdef c_Resolution _reso_stamp(npy_datetimestruct *dts):
    # Return the finest resolution needed to represent this datetimestruct,
    # checked from finest (sub-microsecond) to coarsest (day).
    if dts.ps != 0:
        return c_Resolution.RESO_NS
    elif dts.us != 0:
        # whole milliseconds need only ms resolution, otherwise us
        if dts.us % 1000 == 0:
            return c_Resolution.RESO_MS
        return c_Resolution.RESO_US
    elif dts.sec != 0:
        return c_Resolution.RESO_SEC
    elif dts.min != 0:
        return c_Resolution.RESO_MIN
    elif dts.hour != 0:
        return c_Resolution.RESO_HR
    return c_Resolution.RESO_DAY
@cython.wraparound(False)
@cython.boundscheck(False)
def get_resolution(
    ndarray stamps, tzinfo tz=None, NPY_DATETIMEUNIT reso=NPY_FR_ns
) -> Resolution:
    # stamps is int64_t, any ndim
    """
    Return the finest Resolution needed to represent all of `stamps` as
    wall times in `tz`.  NaT entries are ignored.

    Parameters
    ----------
    stamps : ndarray[int64]
    tz : tzinfo or None, default None
    reso : NPY_DATETIMEUNIT, default NPY_FR_ns

    Returns
    -------
    Resolution
    """
    cdef:
        Localizer info = Localizer(tz, creso=reso)
        int64_t utc_val, local_val
        Py_ssize_t i, n = stamps.size
        Py_ssize_t pos = -1  # unused, avoid not-initialized warning
        cnp.flatiter it = cnp.PyArray_IterNew(stamps)
        npy_datetimestruct dts
        c_Resolution pd_reso = c_Resolution.RESO_DAY, curr_reso

    for i in range(n):
        # Analogous to: utc_val = stamps[i]
        utc_val = cnp.PyArray_GETITEM(stamps, cnp.PyArray_ITER_DATA(it))

        if utc_val == NPY_NAT:
            pass
        else:
            local_val = info.utc_val_to_local_val(utc_val, &pos)
            pandas_datetime_to_datetimestruct(local_val, reso, &dts)
            # Track the finest (lowest-valued) resolution seen so far.
            curr_reso = _reso_stamp(&dts)
            if curr_reso < pd_reso:
                pd_reso = curr_reso

        cnp.PyArray_ITER_NEXT(it)

    return Resolution(pd_reso)
# -------------------------------------------------------------------------
@cython.cdivision(False)
@cython.wraparound(False)
@cython.boundscheck(False)
cpdef ndarray normalize_i8_timestamps(ndarray stamps, tzinfo tz, NPY_DATETIMEUNIT reso):
    # stamps is int64_t, arbitrary ndim
    """
    Normalize each of the timezone-aware timestamps in the given
    array by rounding down to the beginning of the day (i.e. midnight).
    This is midnight for timezone, `tz`.

    Parameters
    ----------
    stamps : int64 ndarray
    tz : tzinfo or None
    reso : NPY_DATETIMEUNIT

    Returns
    -------
    result : int64 ndarray of normalized timestamps
    """
    cdef:
        Localizer info = Localizer(tz, creso=reso)
        int64_t utc_val, local_val, res_val
        Py_ssize_t i, n = stamps.size
        Py_ssize_t pos = -1  # unused, avoid not-initialized warning
        ndarray result = cnp.PyArray_EMPTY(stamps.ndim, stamps.shape, cnp.NPY_INT64, 0)
        cnp.broadcast mi = cnp.PyArray_MultiIterNew2(result, stamps)
        int64_t ppd = periods_per_day(reso)

    for i in range(n):
        # Analogous to: utc_val = stamps[i]
        utc_val = (<int64_t*>cnp.PyArray_MultiIter_DATA(mi, 1))[0]

        if utc_val == NPY_NAT:
            res_val = NPY_NAT
        else:
            local_val = info.utc_val_to_local_val(utc_val, &pos)
            # Round down to local midnight by removing the intra-day remainder.
            res_val = local_val - (local_val % ppd)

        # Analogous to: result[i] = res_val
        (<int64_t*>cnp.PyArray_MultiIter_DATA(mi, 0))[0] = res_val

        cnp.PyArray_MultiIter_NEXT(mi)

    return result
@cython.wraparound(False)
@cython.boundscheck(False)
def is_date_array_normalized(ndarray stamps, tzinfo tz, NPY_DATETIMEUNIT reso) -> bool:
    # stamps is int64_t, arbitrary ndim
    """
    Check if all of the given timestamps are normalized to
    midnight, i.e. hour == minute == second == 0. If the optional timezone
    `tz` is not None, then this is midnight for this timezone.

    Parameters
    ----------
    stamps : int64 ndarray
    tz : tzinfo or None
    reso : NPY_DATETIMEUNIT

    Returns
    -------
    is_normalized : bool True if all stamps are normalized
    """
    cdef:
        Localizer info = Localizer(tz, creso=reso)
        int64_t utc_val, local_val
        Py_ssize_t i, n = stamps.size
        Py_ssize_t pos = -1  # unused, avoid not-initialized warning
        cnp.flatiter it = cnp.PyArray_IterNew(stamps)
        int64_t ppd = periods_per_day(reso)

    for i in range(n):
        # Analogous to: utc_val = stamps[i]
        utc_val = cnp.PyArray_GETITEM(stamps, cnp.PyArray_ITER_DATA(it))

        # NOTE(review): unlike the sibling functions in this module, NPY_NAT
        # entries are not special-cased here — confirm this is intentional.
        local_val = info.utc_val_to_local_val(utc_val, &pos)

        if local_val % ppd != 0:
            return False

        cnp.PyArray_ITER_NEXT(it)

    return True
# -------------------------------------------------------------------------
@cython.wraparound(False)
@cython.boundscheck(False)
def dt64arr_to_periodarr(
    ndarray stamps, int freq, tzinfo tz, NPY_DATETIMEUNIT reso=NPY_FR_ns
):
    # stamps is int64_t, arbitrary ndim
    """
    Convert i8 datetimes (UTC) to period ordinals at the given frequency,
    converting to wall time in `tz` first when tz is not None.  NPY_NAT
    entries pass through unchanged.

    Parameters
    ----------
    stamps : ndarray[int64]
    freq : int
    tz : tzinfo or None
    reso : NPY_DATETIMEUNIT, default NPY_FR_ns

    Returns
    -------
    ndarray[int64]
    """
    cdef:
        Localizer info = Localizer(tz, creso=reso)
        Py_ssize_t i, n = stamps.size
        Py_ssize_t pos = -1  # unused, avoid not-initialized warning
        int64_t utc_val, local_val, res_val
        npy_datetimestruct dts
        ndarray result = cnp.PyArray_EMPTY(stamps.ndim, stamps.shape, cnp.NPY_INT64, 0)
        cnp.broadcast mi = cnp.PyArray_MultiIterNew2(result, stamps)

    for i in range(n):
        # Analogous to: utc_val = stamps[i]
        utc_val = (<int64_t*>cnp.PyArray_MultiIter_DATA(mi, 1))[0]

        if utc_val == NPY_NAT:
            res_val = NPY_NAT
        else:
            local_val = info.utc_val_to_local_val(utc_val, &pos)
            pandas_datetime_to_datetimestruct(local_val, reso, &dts)
            res_val = get_period_ordinal(&dts, freq)

        # Analogous to: result[i] = res_val
        (<int64_t*>cnp.PyArray_MultiIter_DATA(mi, 0))[0] = res_val

        cnp.PyArray_MultiIter_NEXT(mi)

    return result