Virtual environments teil20 and teil20a

2023-09-01 15:57:01 +02:00
parent 69da4cda15
commit c9aee44812
8197 changed files with 1603063 additions and 0 deletions


@@ -0,0 +1,103 @@
/*
Copyright (c) 2020, PyData Development Team
All rights reserved.
Distributed under the terms of the BSD Simplified License.
The full license is in the LICENSE file, distributed with this software.
*/
// Conversion routines that are useful for serialization,
// but which don't interact with JSON objects directly
#include "pandas/datetime/date_conversions.h"
#include "pandas/vendored/numpy/datetime/np_datetime.h"
#include "pandas/vendored/numpy/datetime/np_datetime_strings.h"
/*
* Function: scaleNanosecToUnit
* -----------------------------
*
* Scales an integer value representing time in nanoseconds to provided unit.
*
* Mutates the provided value directly. Returns 0 on success, non-zero on error.
*/
int scaleNanosecToUnit(npy_int64 *value, NPY_DATETIMEUNIT unit) {
switch (unit) {
case NPY_FR_ns:
break;
case NPY_FR_us:
*value /= 1000LL;
break;
case NPY_FR_ms:
*value /= 1000000LL;
break;
case NPY_FR_s:
*value /= 1000000000LL;
break;
default:
return -1;
}
return 0;
}
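/*
 * Illustrative sketch (hypothetical helper, not part of the original file):
 * scaling a nanosecond timestamp down to milliseconds in place with
 * scaleNanosecToUnit. A non-zero return signals an unsupported unit.
 */
static inline void example_scale_ns_to_ms(void) {
  npy_int64 t = 1577836800123456789LL; // 2020-01-01T00:00:00.123456789 UTC in ns
  if (scaleNanosecToUnit(&t, NPY_FR_ms) == 0) {
    // t is now 1577836800123, i.e. milliseconds since the Unix epoch
  }
}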
/* Converts the int64_t representation of a datetime to ISO; mutates len */
char *int64ToIso(int64_t value,
NPY_DATETIMEUNIT valueUnit,
NPY_DATETIMEUNIT base,
size_t *len) {
npy_datetimestruct dts;
int ret_code;
pandas_datetime_to_datetimestruct(value, valueUnit, &dts);
*len = (size_t)get_datetime_iso_8601_strlen(0, base);
char *result = PyObject_Malloc(*len);
if (result == NULL) {
PyErr_NoMemory();
return NULL;
}
// datetime64 is always naive
ret_code = make_iso_8601_datetime(&dts, result, *len, 0, base);
if (ret_code != 0) {
PyErr_SetString(PyExc_ValueError,
"Could not convert datetime value to string");
PyObject_Free(result);
}
// Note that get_datetime_iso_8601_strlen just gives a generic size
// for ISO string conversion, not the actual size used
*len = strlen(result);
return result;
}
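/*
 * Illustrative sketch (hypothetical helper, not part of the original file):
 * formatting a nanosecond epoch value as an ISO 8601 string at second
 * precision. The caller owns the returned buffer and must release it with
 * PyObject_Free; *len receives the actual string length.
 */
static inline char *example_ns_to_iso_seconds(int64_t ns_since_epoch) {
  size_t len = 0;
  // e.g. 1577836800000000000 -> roughly "2020-01-01T00:00:00"
  return int64ToIso(ns_since_epoch, NPY_FR_ns, NPY_FR_s, &len);
}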
npy_datetime NpyDateTimeToEpoch(npy_datetime dt, NPY_DATETIMEUNIT base) {
scaleNanosecToUnit(&dt, base);
return dt;
}
/* Converts the int64_t representation of a duration to ISO; mutates len */
char *int64ToIsoDuration(int64_t value, size_t *len) {
pandas_timedeltastruct tds;
int ret_code;
pandas_timedelta_to_timedeltastruct(value, NPY_FR_ns, &tds);
// Max theoretical length of ISO Duration with 64 bit day
// as the largest unit is 70 characters + 1 for a null terminator
char *result = PyObject_Malloc(71);
if (result == NULL) {
PyErr_NoMemory();
return NULL;
}
ret_code = make_iso_8601_timedelta(&tds, result, len);
if (ret_code == -1) {
PyErr_SetString(PyExc_ValueError,
"Could not convert timedelta value to string");
PyObject_Free(result);
return NULL;
}
return result;
}


@@ -0,0 +1,253 @@
/*
Copyright (c) 2016, PyData Development Team
All rights reserved.
Distributed under the terms of the BSD Simplified License.
The full license is in the LICENSE file, distributed with this software.
Copyright (c) 2005-2011, NumPy Developers
All rights reserved.
This file is derived from NumPy 1.7. See NUMPY_LICENSE.txt
*/
#define _PANDAS_DATETIME_IMPL
#define PY_SSIZE_T_CLEAN
#include <Python.h>
#include "datetime.h"
#include "pandas/datetime/pd_datetime.h"
static void pandas_datetime_destructor(PyObject *op) {
void *ptr = PyCapsule_GetPointer(op, PandasDateTime_CAPSULE_NAME);
PyMem_Free(ptr);
}
/*
*
* Converts a Python datetime.datetime or datetime.date
* object into a NumPy npy_datetimestruct. Uses tzinfo (if present)
* to convert to UTC time.
*
* The following implementation just asks for attributes, and thus
* supports datetime duck typing. The tzinfo time zone conversion
* requires this style of access as well.
*
* Returns -1 on error and 0 on success. Objects that expose only the
* date attributes (no hour/minute/second/microsecond) are converted as dates.
*/
static int convert_pydatetime_to_datetimestruct(PyObject *dtobj,
npy_datetimestruct *out) {
// Assumes that obj is a valid datetime object
PyObject *tmp;
PyObject *obj = (PyObject*)dtobj;
/* Initialize the output to all zeros */
memset(out, 0, sizeof(npy_datetimestruct));
out->month = 1;
out->day = 1;
out->year = PyLong_AsLong(PyObject_GetAttrString(obj, "year"));
out->month = PyLong_AsLong(PyObject_GetAttrString(obj, "month"));
out->day = PyLong_AsLong(PyObject_GetAttrString(obj, "day"));
// TODO(anyone): If we can get PyDateTime_IMPORT to work, we could use
// PyDateTime_Check here, and less verbose attribute lookups.
/* Check for time attributes (if not there, return success as a date) */
if (!PyObject_HasAttrString(obj, "hour") ||
!PyObject_HasAttrString(obj, "minute") ||
!PyObject_HasAttrString(obj, "second") ||
!PyObject_HasAttrString(obj, "microsecond")) {
return 0;
}
out->hour = PyLong_AsLong(PyObject_GetAttrString(obj, "hour"));
out->min = PyLong_AsLong(PyObject_GetAttrString(obj, "minute"));
out->sec = PyLong_AsLong(PyObject_GetAttrString(obj, "second"));
out->us = PyLong_AsLong(PyObject_GetAttrString(obj, "microsecond"));
if (PyObject_HasAttrString(obj, "tzinfo")) {
PyObject *offset = extract_utc_offset(obj);
/* Apply the time zone offset if datetime obj is tz-aware */
if (offset != NULL) {
if (offset == Py_None) {
Py_DECREF(offset);
return 0;
}
PyObject *tmp_int;
int seconds_offset, minutes_offset;
/*
* The timedelta should have a function "total_seconds"
* which contains the value we want.
*/
tmp = PyObject_CallMethod(offset, "total_seconds", "");
Py_DECREF(offset);
if (tmp == NULL) {
return -1;
}
tmp_int = PyNumber_Long(tmp);
if (tmp_int == NULL) {
Py_DECREF(tmp);
return -1;
}
seconds_offset = PyLong_AsLong(tmp_int);
if (seconds_offset == -1 && PyErr_Occurred()) {
Py_DECREF(tmp_int);
Py_DECREF(tmp);
return -1;
}
Py_DECREF(tmp_int);
Py_DECREF(tmp);
/* Convert to a minutes offset and apply it */
minutes_offset = seconds_offset / 60;
add_minutes_to_datetimestruct(out, -minutes_offset);
}
}
return 0;
}
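/*
 * Illustrative sketch (hypothetical usage, not part of the original file):
 * filling an npy_datetimestruct from a datetime-like Python object.
 * Tz-aware datetimes are shifted to UTC via their utcoffset(); -1 signals
 * an error (a Python exception may already be set by the conversion).
 */
static inline int example_pydatetime_to_struct(PyObject *py_dt,
                                               npy_datetimestruct *dts) {
  if (convert_pydatetime_to_datetimestruct(py_dt, dts) != 0) {
    return -1;
  }
  // dts->year, dts->month, ..., dts->us now describe py_dt in UTC
  return 0;
}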
// Converts a Python object representing a Date / Datetime to ISO format
// up to precision `base` e.g. base="s" yields "2020-01-01T00:00:00Z"
// while base="ns" yields "2020-01-01T00:00:00.000000000Z"
// len is mutated to save the length of the returned string
static char *PyDateTimeToIso(PyObject *obj, NPY_DATETIMEUNIT base,
size_t *len) {
npy_datetimestruct dts;
int ret;
ret = convert_pydatetime_to_datetimestruct(obj, &dts);
if (ret != 0) {
if (!PyErr_Occurred()) {
PyErr_SetString(PyExc_ValueError,
"Could not convert PyDateTime to numpy datetime");
}
return NULL;
}
*len = (size_t)get_datetime_iso_8601_strlen(0, base);
char *result = PyObject_Malloc(*len);
// Check to see if PyDateTime has a timezone.
// Don't convert to UTC if it doesn't.
int is_tz_aware = 0;
if (PyObject_HasAttrString(obj, "tzinfo")) {
PyObject *offset = extract_utc_offset(obj);
if (offset == NULL) {
PyObject_Free(result);
return NULL;
}
is_tz_aware = offset != Py_None;
Py_DECREF(offset);
}
ret = make_iso_8601_datetime(&dts, result, *len, is_tz_aware, base);
if (ret != 0) {
PyErr_SetString(PyExc_ValueError,
"Could not convert datetime value to string");
PyObject_Free(result);
return NULL;
}
// Note that get_datetime_iso_8601_strlen just gives a generic size
// for ISO string conversion, not the actual size used
*len = strlen(result);
return result;
}
// Convert a Python Date/Datetime to Unix epoch with resolution base
static npy_datetime PyDateTimeToEpoch(PyObject *dt, NPY_DATETIMEUNIT base) {
npy_datetimestruct dts;
int ret;
ret = convert_pydatetime_to_datetimestruct(dt, &dts);
if (ret != 0) {
if (!PyErr_Occurred()) {
PyErr_SetString(PyExc_ValueError,
"Could not convert PyDateTime to numpy datetime");
}
// TODO(username): is setting errMsg required?
// ((JSONObjectEncoder *)tc->encoder)->errorMsg = "";
// return NULL;
}
npy_datetime npy_dt = npy_datetimestruct_to_datetime(NPY_FR_ns, &dts);
return NpyDateTimeToEpoch(npy_dt, base);
}
static int pandas_datetime_exec(PyObject *module) {
PyDateTime_IMPORT;
PandasDateTime_CAPI *capi = PyMem_Malloc(sizeof(PandasDateTime_CAPI));
if (capi == NULL) {
PyErr_NoMemory();
return -1;
}
capi->npy_datetimestruct_to_datetime = npy_datetimestruct_to_datetime;
capi->scaleNanosecToUnit = scaleNanosecToUnit;
capi->int64ToIso = int64ToIso;
capi->NpyDateTimeToEpoch = NpyDateTimeToEpoch;
capi->PyDateTimeToIso = PyDateTimeToIso;
capi->PyDateTimeToEpoch = PyDateTimeToEpoch;
capi->int64ToIsoDuration = int64ToIsoDuration;
capi->pandas_datetime_to_datetimestruct = pandas_datetime_to_datetimestruct;
capi->pandas_timedelta_to_timedeltastruct =
pandas_timedelta_to_timedeltastruct;
capi->convert_pydatetime_to_datetimestruct =
convert_pydatetime_to_datetimestruct;
capi->cmp_npy_datetimestruct = cmp_npy_datetimestruct;
capi->get_datetime_metadata_from_dtype = get_datetime_metadata_from_dtype;
capi->parse_iso_8601_datetime = parse_iso_8601_datetime;
capi->get_datetime_iso_8601_strlen = get_datetime_iso_8601_strlen;
capi->make_iso_8601_datetime = make_iso_8601_datetime;
capi->make_iso_8601_timedelta = make_iso_8601_timedelta;
PyObject *capsule = PyCapsule_New(capi, PandasDateTime_CAPSULE_NAME,
pandas_datetime_destructor);
if (capsule == NULL) {
PyMem_Free(capi);
return -1;
}
// Monkeypatch the top level pandas module to have an attribute for the
// C-API. This is required because Python capsules do not support setting
// this attribute on anything but the top level package. Ideally this
// would no longer be needed once cpython gh-6898 is implemented
PyObject *pandas = PyImport_ImportModule("pandas");
if (!pandas) {
PyErr_SetString(PyExc_ImportError,
"pd_datetime.c could not import module pandas");
Py_DECREF(capsule);
return -1;
}
if (PyModule_AddObject(pandas, "_pandas_datetime_CAPI", capsule) < 0) {
Py_DECREF(capsule);
return -1;
}
return 0;
}
static PyModuleDef_Slot pandas_datetime_slots[] = {
{Py_mod_exec, pandas_datetime_exec}, {0, NULL}};
static struct PyModuleDef pandas_datetimemodule = {
PyModuleDef_HEAD_INIT,
.m_name = "pandas._libs.pandas_datetime",
.m_doc = "Internal module with datetime support for other extensions",
.m_size = 0,
.m_methods = NULL,
.m_slots = pandas_datetime_slots};
PyMODINIT_FUNC PyInit_pandas_datetime(void) {
PyDateTime_IMPORT;
return PyModuleDef_Init(&pandas_datetimemodule);
}
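/*
 * Illustrative consumer-side sketch (hypothetical, assuming, as in the
 * pd_datetime header, that PandasDateTime_CAPSULE_NAME is the dotted path
 * of the "_pandas_datetime_CAPI" attribute published on the pandas module
 * above): another extension can retrieve the C-API struct via
 * PyCapsule_Import.
 */
static inline PandasDateTime_CAPI *example_import_datetime_capi(void) {
  // Imports "pandas", reads the capsule attribute added by
  // PyModule_AddObject above, and unwraps the pointer; returns NULL with
  // an exception set on failure.
  return (PandasDateTime_CAPI *)PyCapsule_Import(PandasDateTime_CAPSULE_NAME, 0);
}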


@@ -0,0 +1,107 @@
/*
Copyright (c) 2016, PyData Development Team
All rights reserved.
Distributed under the terms of the BSD Simplified License.
The full license is in the LICENSE file, distributed with this software.
*/
#include "pandas/parser/io.h"
/*
On-disk FILE, uncompressed
*/
void *new_rd_source(PyObject *obj) {
rd_source *rds = (rd_source *)malloc(sizeof(rd_source));
if (rds == NULL) {
PyErr_NoMemory();
return NULL;
}
/* hold on to this object */
Py_INCREF(obj);
rds->obj = obj;
rds->buffer = NULL;
rds->position = 0;
return (void *)rds;
}
/*
Cleanup callbacks
*/
int del_rd_source(void *rds) {
Py_XDECREF(RDS(rds)->obj);
Py_XDECREF(RDS(rds)->buffer);
free(rds);
return 0;
}
/*
IO callbacks
*/
void *buffer_rd_bytes(void *source, size_t nbytes, size_t *bytes_read,
int *status, const char *encoding_errors) {
PyGILState_STATE state;
PyObject *result, *func, *args, *tmp;
void *retval;
size_t length;
rd_source *src = RDS(source);
state = PyGILState_Ensure();
/* delete old object */
Py_XDECREF(src->buffer);
src->buffer = NULL;
args = Py_BuildValue("(i)", nbytes);
func = PyObject_GetAttrString(src->obj, "read");
/* Note: PyObject_CallObject requires the GIL */
result = PyObject_CallObject(func, args);
Py_XDECREF(args);
Py_XDECREF(func);
if (result == NULL) {
PyGILState_Release(state);
*bytes_read = 0;
*status = CALLING_READ_FAILED;
return NULL;
} else if (!PyBytes_Check(result)) {
tmp = PyUnicode_AsEncodedString(result, "utf-8", encoding_errors);
Py_DECREF(result);
if (tmp == NULL) {
PyGILState_Release(state);
return NULL;
}
result = tmp;
}
length = PySequence_Length(result);
if (length == 0)
*status = REACHED_EOF;
else
*status = 0;
/* hang on to the Python object */
src->buffer = result;
retval = (void *)PyBytes_AsString(result);
PyGILState_Release(state);
/* TODO: more error handling */
*bytes_read = length;
return retval;
}
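/*
 * Illustrative sketch (hypothetical caller, not part of the original file):
 * wrapping a Python file-like object in an rd_source and pulling one chunk
 * through the callback above. Error handling is reduced to the status flag.
 */
static inline void example_read_chunk(PyObject *file_like) {
  int status = 0;
  size_t bytes_read = 0;
  void *src = new_rd_source(file_like); // holds a reference to file_like
  if (src == NULL) {
    return;
  }
  // Calls file_like.read(65536) under the GIL; the resulting bytes object
  // stays alive inside the source until the next read or cleanup.
  void *buf = buffer_rd_bytes(src, 65536, &bytes_read, &status, "strict");
  (void)buf;
  del_rd_source(src); // drops the references and frees the struct
}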


@@ -0,0 +1,178 @@
/*
Copyright (c) 2023, PyData Development Team
All rights reserved.
Distributed under the terms of the BSD Simplified License.
*/
#define _PANDAS_PARSER_IMPL
#include "pandas/parser/pd_parser.h"
#include "pandas/parser/io.h"
static int to_double(char *item, double *p_value, char sci, char decimal,
int *maybe_int) {
char *p_end = NULL;
int error = 0;
/* Switch to precise xstrtod GH 31364 */
*p_value =
precise_xstrtod(item, &p_end, decimal, sci, '\0', 1, &error, maybe_int);
return (error == 0) && (!*p_end);
}
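/*
 * Illustrative sketch (hypothetical, not part of the original file):
 * to_double wraps precise_xstrtod and reports success only when the whole
 * token is consumed. With sci='E' and decimal='.', "1.5E3" parses to
 * 1500.0; maybe_int reports whether the token could still be an integer.
 */
static inline int example_parse_double(double *out) {
  char item[] = "1.5E3";
  int maybe_int = 0;
  return to_double(item, out, 'E', '.', &maybe_int); // 1 on success
}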
static int floatify(PyObject *str, double *result, int *maybe_int) {
int status;
char *data;
PyObject *tmp = NULL;
const char sci = 'E';
const char dec = '.';
if (PyBytes_Check(str)) {
data = PyBytes_AS_STRING(str);
} else if (PyUnicode_Check(str)) {
tmp = PyUnicode_AsUTF8String(str);
if (tmp == NULL) {
return -1;
}
data = PyBytes_AS_STRING(tmp);
} else {
PyErr_SetString(PyExc_TypeError, "Invalid object type");
return -1;
}
status = to_double(data, result, sci, dec, maybe_int);
if (!status) {
/* handle inf/-inf infinity/-infinity */
if (strlen(data) == 3) {
if (0 == strcasecmp(data, "inf")) {
*result = HUGE_VAL;
*maybe_int = 0;
} else {
goto parsingerror;
}
} else if (strlen(data) == 4) {
if (0 == strcasecmp(data, "-inf")) {
*result = -HUGE_VAL;
*maybe_int = 0;
} else if (0 == strcasecmp(data, "+inf")) {
*result = HUGE_VAL;
*maybe_int = 0;
} else {
goto parsingerror;
}
} else if (strlen(data) == 8) {
if (0 == strcasecmp(data, "infinity")) {
*result = HUGE_VAL;
*maybe_int = 0;
} else {
goto parsingerror;
}
} else if (strlen(data) == 9) {
if (0 == strcasecmp(data, "-infinity")) {
*result = -HUGE_VAL;
*maybe_int = 0;
} else if (0 == strcasecmp(data, "+infinity")) {
*result = HUGE_VAL;
*maybe_int = 0;
} else {
goto parsingerror;
}
} else {
goto parsingerror;
}
}
Py_XDECREF(tmp);
return 0;
parsingerror:
PyErr_Format(PyExc_ValueError, "Unable to parse string \"%s\"", data);
Py_XDECREF(tmp);
return -1;
}
static void pandas_parser_destructor(PyObject *op) {
void *ptr = PyCapsule_GetPointer(op, PandasParser_CAPSULE_NAME);
PyMem_Free(ptr);
}
static int pandas_parser_exec(PyObject *module) {
PandasParser_CAPI *capi = PyMem_Malloc(sizeof(PandasParser_CAPI));
if (capi == NULL) {
PyErr_NoMemory();
return -1;
}
capi->to_double = to_double;
capi->floatify = floatify;
capi->new_rd_source = new_rd_source;
capi->del_rd_source = del_rd_source;
capi->buffer_rd_bytes = buffer_rd_bytes;
capi->uint_state_init = uint_state_init;
capi->uint64_conflict = uint64_conflict;
capi->coliter_setup = coliter_setup;
capi->parser_new = parser_new;
capi->parser_init = parser_init;
capi->parser_free = parser_free;
capi->parser_del = parser_del;
capi->parser_add_skiprow = parser_add_skiprow;
capi->parser_set_skipfirstnrows = parser_set_skipfirstnrows;
capi->parser_set_default_options = parser_set_default_options;
capi->parser_consume_rows = parser_consume_rows;
capi->parser_trim_buffers = parser_trim_buffers;
capi->tokenize_all_rows = tokenize_all_rows;
capi->tokenize_nrows = tokenize_nrows;
capi->str_to_int64 = str_to_int64;
capi->str_to_uint64 = str_to_uint64;
capi->xstrtod = xstrtod;
capi->precise_xstrtod = precise_xstrtod;
capi->round_trip = round_trip;
capi->to_boolean = to_boolean;
PyObject *capsule =
PyCapsule_New(capi, PandasParser_CAPSULE_NAME, pandas_parser_destructor);
if (capsule == NULL) {
PyMem_Free(capi);
return -1;
}
// Monkeypatch the top level pandas module to have an attribute for the
// C-API. This is required because Python capsules do not support setting
// this attribute on anything but the top level package. Ideally this
// would no longer be needed once cpython gh-6898 is implemented
PyObject *pandas = PyImport_ImportModule("pandas");
if (!pandas) {
PyErr_SetString(PyExc_ImportError,
"pd_parser.c could not import module pandas");
Py_DECREF(capsule);
return -1;
}
if (PyModule_AddObject(pandas, "_pandas_parser_CAPI", capsule) < 0) {
Py_DECREF(capsule);
return -1;
}
return 0;
}
static PyModuleDef_Slot pandas_parser_slots[] = {
{Py_mod_exec, pandas_parser_exec}, {0, NULL}};
static struct PyModuleDef pandas_parsermodule = {
PyModuleDef_HEAD_INIT,
.m_name = "pandas._libs.pandas_parser",
.m_doc = "Internal module with parser support for other extensions",
.m_size = 0,
.m_methods = NULL,
.m_slots = pandas_parser_slots};
PyMODINIT_FUNC PyInit_pandas_parser(void) {
return PyModuleDef_Init(&pandas_parsermodule);
}

File diff suppressed because it is too large.


@@ -0,0 +1,947 @@
/*
Copyright (c) 2016, PyData Development Team
All rights reserved.
Distributed under the terms of the BSD Simplified License.
The full license is in the LICENSE file, distributed with this software.
Copyright (c) 2005-2011, NumPy Developers
All rights reserved.
This file is derived from NumPy 1.7. See NUMPY_LICENSE.txt
*/
#define NO_IMPORT
#ifndef NPY_NO_DEPRECATED_API
#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
#endif // NPY_NO_DEPRECATED_API
#include <Python.h>
#include <numpy/arrayobject.h>
#include <numpy/arrayscalars.h>
#include <numpy/ndarraytypes.h>
#include "pandas/vendored/numpy/datetime/np_datetime.h"
const int days_per_month_table[2][12] = {
{31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31},
{31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}};
/*
* Returns 1 if the given year is a leap year, 0 otherwise.
*/
int is_leapyear(npy_int64 year) {
return (year & 0x3) == 0 && /* year % 4 == 0 */
((year % 100) != 0 || (year % 400) == 0);
}
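/*
 * Illustrative sketch of the Gregorian rule implemented above
 * (hypothetical helper, not part of the original file).
 */
static inline void example_leapyears(void) {
  // 2000: divisible by 400 -> leap; 1900: divisible by 100 but not 400 -> not leap
  int y2000 = is_leapyear(2000); // 1
  int y1900 = is_leapyear(1900); // 0
  (void)y2000;
  (void)y1900;
}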
/*
* Adjusts a datetimestruct based on a minutes offset. Assumes
* the current values are valid.
*/
void add_minutes_to_datetimestruct(npy_datetimestruct *dts, int minutes) {
int isleap;
/* MINUTES */
dts->min += minutes;
while (dts->min < 0) {
dts->min += 60;
dts->hour--;
}
while (dts->min >= 60) {
dts->min -= 60;
dts->hour++;
}
/* HOURS */
while (dts->hour < 0) {
dts->hour += 24;
dts->day--;
}
while (dts->hour >= 24) {
dts->hour -= 24;
dts->day++;
}
/* DAYS */
if (dts->day < 1) {
dts->month--;
if (dts->month < 1) {
dts->year--;
dts->month = 12;
}
isleap = is_leapyear(dts->year);
dts->day += days_per_month_table[isleap][dts->month - 1];
} else if (dts->day > 28) {
isleap = is_leapyear(dts->year);
if (dts->day > days_per_month_table[isleap][dts->month - 1]) {
dts->day -= days_per_month_table[isleap][dts->month - 1];
dts->month++;
if (dts->month > 12) {
dts->year++;
dts->month = 1;
}
}
}
}
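/*
 * Illustrative sketch (hypothetical, not part of the original file):
 * subtracting 90 minutes from 1970-01-01T00:30 borrows through the hour,
 * day, month and year fields and yields 1969-12-31T23:00.
 */
static inline void example_add_minutes(void) {
  npy_datetimestruct dts = {0};
  dts.year = 1970;
  dts.month = 1;
  dts.day = 1;
  dts.min = 30;
  add_minutes_to_datetimestruct(&dts, -90);
  // dts is now 1969-12-31 23:00
}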
/*
* Calculates the days offset from the 1970 epoch.
*/
npy_int64 get_datetimestruct_days(const npy_datetimestruct *dts) {
int i, month;
npy_int64 year, days = 0;
const int *month_lengths;
year = dts->year - 1970;
days = year * 365;
/* Adjust for leap years */
if (days >= 0) {
/*
* 1968 is the closest leap year before 1970.
* Exclude the current year, so add 1.
*/
year += 1;
/* Add one day for each 4 years */
days += year / 4;
/* 1900 is the closest previous year divisible by 100 */
year += 68;
/* Subtract one day for each 100 years */
days -= year / 100;
/* 1600 is the closest previous year divisible by 400 */
year += 300;
/* Add one day for each 400 years */
days += year / 400;
} else {
/*
* 1972 is the closest later leap year after 1970.
* Include the current year, so subtract 2.
*/
year -= 2;
/* Subtract one day for each 4 years */
days += year / 4;
/* 2000 is the closest later year divisible by 100 */
year -= 28;
/* Add one day for each 100 years */
days -= year / 100;
/* 2000 is also the closest later year divisible by 400 */
/* Subtract one day for each 400 years */
days += year / 400;
}
month_lengths = days_per_month_table[is_leapyear(dts->year)];
month = dts->month - 1;
/* Add the months */
for (i = 0; i < month; ++i) {
days += month_lengths[i];
}
/* Add the days */
days += dts->day - 1;
return days;
}
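/*
 * Illustrative sketch (hypothetical, not part of the original file): for
 * 1971-01-01 the leap-year corrections cancel out (1970 is not a leap
 * year) and the offset from the epoch is exactly 365 days.
 */
static inline npy_int64 example_days_for_1971(void) {
  npy_datetimestruct dts = {0};
  dts.year = 1971;
  dts.month = 1;
  dts.day = 1;
  return get_datetimestruct_days(&dts); // 365
}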
/*
* Modifies '*days_' to be the day offset within the year,
* and returns the year.
*/
static npy_int64 days_to_yearsdays(npy_int64 *days_) {
const npy_int64 days_per_400years = (400 * 365 + 100 - 4 + 1);
/* Adjust so it's relative to the year 2000 (divisible by 400) */
npy_int64 days = (*days_) - (365 * 30 + 7);
npy_int64 year;
/* Break down the 400 year cycle to get the year and day within the year */
if (days >= 0) {
year = 400 * (days / days_per_400years);
days = days % days_per_400years;
} else {
year = 400 * ((days - (days_per_400years - 1)) / days_per_400years);
days = days % days_per_400years;
if (days < 0) {
days += days_per_400years;
}
}
/* Work out the year/day within the 400 year cycle */
if (days >= 366) {
year += 100 * ((days - 1) / (100 * 365 + 25 - 1));
days = (days - 1) % (100 * 365 + 25 - 1);
if (days >= 365) {
year += 4 * ((days + 1) / (4 * 365 + 1));
days = (days + 1) % (4 * 365 + 1);
if (days >= 366) {
year += (days - 1) / 365;
days = (days - 1) % 365;
}
}
}
*days_ = days;
return year + 2000;
}
/*
* Fills in the year, month, day in 'dts' based on the days
* offset from 1970.
*/
static void set_datetimestruct_days(npy_int64 days, npy_datetimestruct *dts) {
const int *month_lengths;
int i;
dts->year = days_to_yearsdays(&days);
month_lengths = days_per_month_table[is_leapyear(dts->year)];
for (i = 0; i < 12; ++i) {
if (days < month_lengths[i]) {
dts->month = i + 1;
dts->day = days + 1;
return;
} else {
days -= month_lengths[i];
}
}
}
/*
* Compares two npy_datetimestruct objects chronologically
*/
int cmp_npy_datetimestruct(const npy_datetimestruct *a,
const npy_datetimestruct *b) {
if (a->year > b->year) {
return 1;
} else if (a->year < b->year) {
return -1;
}
if (a->month > b->month) {
return 1;
} else if (a->month < b->month) {
return -1;
}
if (a->day > b->day) {
return 1;
} else if (a->day < b->day) {
return -1;
}
if (a->hour > b->hour) {
return 1;
} else if (a->hour < b->hour) {
return -1;
}
if (a->min > b->min) {
return 1;
} else if (a->min < b->min) {
return -1;
}
if (a->sec > b->sec) {
return 1;
} else if (a->sec < b->sec) {
return -1;
}
if (a->us > b->us) {
return 1;
} else if (a->us < b->us) {
return -1;
}
if (a->ps > b->ps) {
return 1;
} else if (a->ps < b->ps) {
return -1;
}
if (a->as > b->as) {
return 1;
} else if (a->as < b->as) {
return -1;
}
return 0;
}
/*
* Returns the offset from utc of the timezone as a timedelta.
* The caller is responsible for ensuring that the tzinfo
* attribute exists on the datetime object.
*
* If the passed object is timezone naive, Py_None is returned.
* If extraction of the offset fails, NULL is returned.
*
* NOTE: This function is not vendored from numpy.
*/
PyObject *extract_utc_offset(PyObject *obj) {
PyObject *tmp = PyObject_GetAttrString(obj, "tzinfo");
if (tmp == NULL) {
return NULL;
}
if (tmp != Py_None) {
PyObject *offset = PyObject_CallMethod(tmp, "utcoffset", "O", obj);
if (offset == NULL) {
Py_DECREF(tmp);
return NULL;
}
return offset;
}
return tmp;
}
/*
* Converts a datetime from a datetimestruct to a datetime based
* on a metadata unit. The date is assumed to be valid.
*/
npy_datetime npy_datetimestruct_to_datetime(NPY_DATETIMEUNIT base,
const npy_datetimestruct *dts) {
npy_datetime ret;
if (base == NPY_FR_Y) {
/* Truncate to the year */
ret = dts->year - 1970;
} else if (base == NPY_FR_M) {
/* Truncate to the month */
ret = 12 * (dts->year - 1970) + (dts->month - 1);
} else {
/* Otherwise calculate the number of days to start */
npy_int64 days = get_datetimestruct_days(dts);
switch (base) {
case NPY_FR_W:
/* Truncate to weeks */
if (days >= 0) {
ret = days / 7;
} else {
ret = (days - 6) / 7;
}
break;
case NPY_FR_D:
ret = days;
break;
case NPY_FR_h:
ret = days * 24 + dts->hour;
break;
case NPY_FR_m:
ret = (days * 24 + dts->hour) * 60 + dts->min;
break;
case NPY_FR_s:
ret = ((days * 24 + dts->hour) * 60 + dts->min) * 60 + dts->sec;
break;
case NPY_FR_ms:
ret = (((days * 24 + dts->hour) * 60 + dts->min) * 60 +
dts->sec) *
1000 +
dts->us / 1000;
break;
case NPY_FR_us:
ret = (((days * 24 + dts->hour) * 60 + dts->min) * 60 +
dts->sec) *
1000000 +
dts->us;
break;
case NPY_FR_ns:
ret = ((((days * 24 + dts->hour) * 60 + dts->min) * 60 +
dts->sec) *
1000000 +
dts->us) *
1000 +
dts->ps / 1000;
break;
case NPY_FR_ps:
ret = ((((days * 24 + dts->hour) * 60 + dts->min) * 60 +
dts->sec) *
1000000 +
dts->us) *
1000000 +
dts->ps;
break;
case NPY_FR_fs:
/* only 2.6 hours */
ret = (((((days * 24 + dts->hour) * 60 + dts->min) * 60 +
dts->sec) *
1000000 +
dts->us) *
1000000 +
dts->ps) *
1000 +
dts->as / 1000;
break;
case NPY_FR_as:
/* only 9.2 secs */
ret = (((((days * 24 + dts->hour) * 60 + dts->min) * 60 +
dts->sec) *
1000000 +
dts->us) *
1000000 +
dts->ps) *
1000000 +
dts->as;
break;
default:
/* Something got corrupted */
PyErr_SetString(
PyExc_ValueError,
"NumPy datetime metadata with corrupt unit value");
return -1;
}
}
return ret;
}
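/*
 * Illustrative sketch (hypothetical, not part of the original file):
 * 1970-01-02T06:00 is 30 units at hour resolution (one day plus six hours
 * past the epoch) and truncates to 1 at day resolution.
 */
static inline void example_struct_to_datetime(void) {
  npy_datetimestruct dts = {0};
  dts.year = 1970;
  dts.month = 1;
  dts.day = 2;
  dts.hour = 6;
  npy_datetime as_hours = npy_datetimestruct_to_datetime(NPY_FR_h, &dts); // 30
  npy_datetime as_days = npy_datetimestruct_to_datetime(NPY_FR_D, &dts);  // 1
  (void)as_hours;
  (void)as_days;
}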
/*
* Port numpy#13188 https://github.com/numpy/numpy/pull/13188/
*
* Computes the python `ret, d = divmod(d, unit)`.
*
* Note that GCC is smart enough at -O2 to eliminate the `if(*d < 0)` branch
* for subsequent calls to this command - it is able to deduce that `*d >= 0`.
*/
npy_int64 extract_unit(npy_datetime *d, npy_datetime unit) {
assert(unit > 0);
npy_int64 div = *d / unit;
npy_int64 mod = *d % unit;
if (mod < 0) {
mod += unit;
div -= 1;
}
assert(mod >= 0);
*d = mod;
return div;
}
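/*
 * Illustrative sketch of the floored divmod above (hypothetical, not part
 * of the original file): for d = -5 and unit = 3 the function returns -2
 * and leaves d = 1, matching Python's divmod(-5, 3) == (-2, 1) rather than
 * C's truncating division.
 */
static inline void example_extract_unit(void) {
  npy_datetime d = -5;
  npy_int64 q = extract_unit(&d, 3); // q == -2, d == 1 afterwards
  (void)q;
}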
/*
* Converts a datetime based on the given metadata into a datetimestruct
*/
void pandas_datetime_to_datetimestruct(npy_datetime dt,
NPY_DATETIMEUNIT base,
npy_datetimestruct *out) {
npy_int64 perday;
/* Initialize the output to all zeros */
memset(out, 0, sizeof(npy_datetimestruct));
out->year = 1970;
out->month = 1;
out->day = 1;
/*
* Note that care must be taken with the / and % operators
* for negative values.
*/
switch (base) {
case NPY_FR_Y:
out->year = 1970 + dt;
break;
case NPY_FR_M:
out->year = 1970 + extract_unit(&dt, 12);
out->month = dt + 1;
break;
case NPY_FR_W:
/* A week is 7 days */
set_datetimestruct_days(dt * 7, out);
break;
case NPY_FR_D:
set_datetimestruct_days(dt, out);
break;
case NPY_FR_h:
perday = 24LL;
set_datetimestruct_days(extract_unit(&dt, perday), out);
out->hour = dt;
break;
case NPY_FR_m:
perday = 24LL * 60;
set_datetimestruct_days(extract_unit(&dt, perday), out);
out->hour = (int)extract_unit(&dt, 60);
out->min = (int)dt;
break;
case NPY_FR_s:
perday = 24LL * 60 * 60;
set_datetimestruct_days(extract_unit(&dt, perday), out);
out->hour = (int)extract_unit(&dt, 60 * 60);
out->min = (int)extract_unit(&dt, 60);
out->sec = (int)dt;
break;
case NPY_FR_ms:
perday = 24LL * 60 * 60 * 1000;
set_datetimestruct_days(extract_unit(&dt, perday), out);
out->hour = (int)extract_unit(&dt, 1000LL * 60 * 60);
out->min = (int)extract_unit(&dt, 1000LL * 60);
out->sec = (int)extract_unit(&dt, 1000LL);
out->us = (int)(dt * 1000);
break;
case NPY_FR_us:
perday = 24LL * 60LL * 60LL * 1000LL * 1000LL;
set_datetimestruct_days(extract_unit(&dt, perday), out);
out->hour = (int)extract_unit(&dt, 1000LL * 1000 * 60 * 60);
out->min = (int)extract_unit(&dt, 1000LL * 1000 * 60);
out->sec = (int)extract_unit(&dt, 1000LL * 1000);
out->us = (int)dt;
break;
case NPY_FR_ns:
perday = 24LL * 60LL * 60LL * 1000LL * 1000LL * 1000LL;
set_datetimestruct_days(extract_unit(&dt, perday), out);
out->hour = (int)extract_unit(&dt, 1000LL * 1000 * 1000 * 60 * 60);
out->min = (int)extract_unit(&dt, 1000LL * 1000 * 1000 * 60);
out->sec = (int)extract_unit(&dt, 1000LL * 1000 * 1000);
out->us = (int)extract_unit(&dt, 1000LL);
out->ps = (int)(dt * 1000);
break;
case NPY_FR_ps:
perday = 24LL * 60 * 60 * 1000 * 1000 * 1000 * 1000;
set_datetimestruct_days(extract_unit(&dt, perday), out);
out->hour = (int)extract_unit(&dt, 1000LL * 1000 * 1000 * 60 * 60);
out->min = (int)extract_unit(&dt, 1000LL * 1000 * 1000 * 60);
out->sec = (int)extract_unit(&dt, 1000LL * 1000 * 1000);
out->us = (int)extract_unit(&dt, 1000LL);
out->ps = (int)(dt * 1000);
break;
case NPY_FR_fs:
/* entire range is only +- 2.6 hours */
out->hour = (int)extract_unit(&dt, 1000LL * 1000 * 1000 * 1000 *
1000 * 60 * 60);
if (out->hour < 0) {
out->year = 1969;
out->month = 12;
out->day = 31;
out->hour += 24;
assert(out->hour >= 0);
}
out->min = (int)extract_unit(&dt, 1000LL * 1000 * 1000 * 1000 *
1000 * 60);
out->sec = (int)extract_unit(&dt, 1000LL * 1000 * 1000 * 1000 *
1000);
out->us = (int)extract_unit(&dt, 1000LL * 1000 * 1000);
out->ps = (int)extract_unit(&dt, 1000LL);
out->as = (int)(dt * 1000);
break;
case NPY_FR_as:
/* entire range is only +- 9.2 seconds */
out->sec = (int)extract_unit(&dt, 1000LL * 1000 * 1000 * 1000 *
1000 * 1000);
if (out->sec < 0) {
out->year = 1969;
out->month = 12;
out->day = 31;
out->hour = 23;
out->min = 59;
out->sec += 60;
assert(out->sec >= 0);
}
out->us = (int)extract_unit(&dt, 1000LL * 1000 * 1000 * 1000);
out->ps = (int)extract_unit(&dt, 1000LL * 1000);
out->as = (int)dt;
break;
default:
PyErr_SetString(PyExc_RuntimeError,
"NumPy datetime metadata is corrupted with invalid "
"base unit");
}
}
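/*
 * Illustrative sketch (hypothetical, not part of the original file):
 * 90,061,000,000,007 nanoseconds past the epoch is one day, one hour, one
 * minute, one second and 7 ns, so the struct comes back as
 * 1970-01-02T01:01:01 with us == 0 and ps == 7000.
 */
static inline void example_ns_to_struct(void) {
  npy_datetimestruct dts;
  pandas_datetime_to_datetimestruct(90061000000007LL, NPY_FR_ns, &dts);
  // dts: year 1970, month 1, day 2, hour 1, min 1, sec 1, us 0, ps 7000
}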
/*
* Converts a timedelta based on a metadata unit into a
* pandas_timedeltastruct. The timedelta is assumed to be valid.
*
* Sets a Python error if the base unit is invalid.
*/
void pandas_timedelta_to_timedeltastruct(npy_timedelta td,
NPY_DATETIMEUNIT base,
pandas_timedeltastruct *out) {
npy_int64 frac;
npy_int64 sfrac;
npy_int64 ifrac;
int sign;
npy_int64 per_day;
npy_int64 per_sec;
/* Initialize the output to all zeros */
memset(out, 0, sizeof(pandas_timedeltastruct));
switch (base) {
case NPY_FR_ns:
per_day = 86400000000000LL;
per_sec = 1000LL * 1000LL * 1000LL;
// put frac in seconds
if (td < 0 && td % per_sec != 0)
frac = td / per_sec - 1;
else
frac = td / per_sec;
if (frac < 0) {
sign = -1;
// even fraction
if ((-frac % 86400LL) != 0) {
out->days = -frac / 86400LL + 1;
frac += 86400LL * out->days;
} else {
frac = -frac;
}
} else {
sign = 1;
out->days = 0;
}
if (frac >= 86400) {
out->days += frac / 86400LL;
frac -= out->days * 86400LL;
}
if (frac >= 3600) {
out->hrs = frac / 3600LL;
frac -= out->hrs * 3600LL;
} else {
out->hrs = 0;
}
if (frac >= 60) {
out->min = frac / 60LL;
frac -= out->min * 60LL;
} else {
out->min = 0;
}
if (frac >= 0) {
out->sec = frac;
frac -= out->sec;
} else {
out->sec = 0;
}
sfrac = (out->hrs * 3600LL + out->min * 60LL
+ out->sec) * per_sec;
if (sign < 0)
out->days = -out->days;
ifrac = td - (out->days * per_day + sfrac);
if (ifrac != 0) {
out->ms = ifrac / (1000LL * 1000LL);
ifrac -= out->ms * 1000LL * 1000LL;
out->us = ifrac / 1000LL;
ifrac -= out->us * 1000LL;
out->ns = ifrac;
} else {
out->ms = 0;
out->us = 0;
out->ns = 0;
}
break;
case NPY_FR_us:
per_day = 86400000000LL;
per_sec = 1000LL * 1000LL;
// put frac in seconds
if (td < 0 && td % per_sec != 0)
frac = td / per_sec - 1;
else
frac = td / per_sec;
if (frac < 0) {
sign = -1;
// even fraction
if ((-frac % 86400LL) != 0) {
out->days = -frac / 86400LL + 1;
frac += 86400LL * out->days;
} else {
frac = -frac;
}
} else {
sign = 1;
out->days = 0;
}
if (frac >= 86400) {
out->days += frac / 86400LL;
frac -= out->days * 86400LL;
}
if (frac >= 3600) {
out->hrs = frac / 3600LL;
frac -= out->hrs * 3600LL;
} else {
out->hrs = 0;
}
if (frac >= 60) {
out->min = frac / 60LL;
frac -= out->min * 60LL;
} else {
out->min = 0;
}
if (frac >= 0) {
out->sec = frac;
frac -= out->sec;
} else {
out->sec = 0;
}
sfrac = (out->hrs * 3600LL + out->min * 60LL
+ out->sec) * per_sec;
if (sign < 0)
out->days = -out->days;
ifrac = td - (out->days * per_day + sfrac);
if (ifrac != 0) {
out->ms = ifrac / 1000LL;
ifrac -= out->ms * 1000LL;
out->us = ifrac / 1L;
ifrac -= out->us * 1L;
out->ns = ifrac;
} else {
out->ms = 0;
out->us = 0;
out->ns = 0;
}
break;
case NPY_FR_ms:
per_day = 86400000LL;
per_sec = 1000LL;
// put frac in seconds
if (td < 0 && td % per_sec != 0)
frac = td / per_sec - 1;
else
frac = td / per_sec;
if (frac < 0) {
sign = -1;
// even fraction
if ((-frac % 86400LL) != 0) {
out->days = -frac / 86400LL + 1;
frac += 86400LL * out->days;
} else {
frac = -frac;
}
} else {
sign = 1;
out->days = 0;
}
if (frac >= 86400) {
out->days += frac / 86400LL;
frac -= out->days * 86400LL;
}
if (frac >= 3600) {
out->hrs = frac / 3600LL;
frac -= out->hrs * 3600LL;
} else {
out->hrs = 0;
}
if (frac >= 60) {
out->min = frac / 60LL;
frac -= out->min * 60LL;
} else {
out->min = 0;
}
if (frac >= 0) {
out->sec = frac;
frac -= out->sec;
} else {
out->sec = 0;
}
sfrac = (out->hrs * 3600LL + out->min * 60LL
+ out->sec) * per_sec;
if (sign < 0)
out->days = -out->days;
ifrac = td - (out->days * per_day + sfrac);
if (ifrac != 0) {
out->ms = ifrac;
out->us = 0;
out->ns = 0;
} else {
out->ms = 0;
out->us = 0;
out->ns = 0;
}
break;
case NPY_FR_s:
// special case where we can simplify many expressions because per_sec=1
per_day = 86400LL;
per_sec = 1L;
// put frac in seconds
if (td < 0 && td % per_sec != 0)
frac = td / per_sec - 1;
else
frac = td / per_sec;
if (frac < 0) {
sign = -1;
// even fraction
if ((-frac % 86400LL) != 0) {
out->days = -frac / 86400LL + 1;
frac += 86400LL * out->days;
} else {
frac = -frac;
}
} else {
sign = 1;
out->days = 0;
}
if (frac >= 86400) {
out->days += frac / 86400LL;
frac -= out->days * 86400LL;
}
if (frac >= 3600) {
out->hrs = frac / 3600LL;
frac -= out->hrs * 3600LL;
} else {
out->hrs = 0;
}
if (frac >= 60) {
out->min = frac / 60LL;
frac -= out->min * 60LL;
} else {
out->min = 0;
}
if (frac >= 0) {
out->sec = frac;
frac -= out->sec;
} else {
out->sec = 0;
}
sfrac = (out->hrs * 3600LL + out->min * 60LL
+ out->sec) * per_sec;
if (sign < 0)
out->days = -out->days;
ifrac = td - (out->days * per_day + sfrac);
if (ifrac != 0) {
out->ms = 0;
out->us = 0;
out->ns = 0;
} else {
out->ms = 0;
out->us = 0;
out->ns = 0;
}
break;
case NPY_FR_m:
out->days = td / 1440LL;
td -= out->days * 1440LL;
out->hrs = td / 60LL;
td -= out->hrs * 60LL;
out->min = td;
out->sec = 0;
out->ms = 0;
out->us = 0;
out->ns = 0;
break;
case NPY_FR_h:
out->days = td / 24LL;
td -= out->days * 24LL;
out->hrs = td;
out->min = 0;
out->sec = 0;
out->ms = 0;
out->us = 0;
out->ns = 0;
break;
case NPY_FR_D:
out->days = td;
out->hrs = 0;
out->min = 0;
out->sec = 0;
out->ms = 0;
out->us = 0;
out->ns = 0;
break;
case NPY_FR_W:
out->days = 7 * td;
out->hrs = 0;
out->min = 0;
out->sec = 0;
out->ms = 0;
out->us = 0;
out->ns = 0;
break;
default:
PyErr_SetString(PyExc_RuntimeError,
"NumPy timedelta metadata is corrupted with "
"invalid base unit");
}
out->seconds = out->hrs * 3600 + out->min * 60 + out->sec;
out->microseconds = out->ms * 1000 + out->us;
out->nanoseconds = out->ns;
}
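/*
 * Illustrative sketch of the sign handling above (hypothetical, not part
 * of the original file): a timedelta of -1 ns normalises to minus one day
 * plus 23:59:59.999999999, i.e. days == -1 with hrs/min/sec == 23/59/59
 * and ms == us == ns == 999.
 */
static inline void example_negative_timedelta(void) {
  pandas_timedeltastruct tds;
  pandas_timedelta_to_timedeltastruct(-1LL, NPY_FR_ns, &tds);
  // tds.days == -1, tds.hrs == 23, tds.min == 59, tds.sec == 59,
  // tds.ms == 999, tds.us == 999, tds.ns == 999
}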
/*
* This function returns a pointer to the DateTimeMetaData
* contained within the provided datetime dtype.
*
* Copied near-verbatim from numpy/core/src/multiarray/datetime.c
*/
PyArray_DatetimeMetaData
get_datetime_metadata_from_dtype(PyArray_Descr *dtype) {
return (((PyArray_DatetimeDTypeMetaData *)dtype->c_metadata)->meta);
}


@@ -0,0 +1,520 @@
/*
Copyright (c) 2011-2013, ESN Social Software AB and Jonas Tarnstrom
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the ESN Social Software AB nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL ESN SOCIAL SOFTWARE AB OR JONAS TARNSTROM BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Portions of code from MODP_ASCII - Ascii transformations (upper/lower, etc)
https://github.com/client9/stringencoders
Copyright (c) 2007 Nick Galbreath -- nickg [at] modp [dot] com. All rights reserved.
Numeric decoder derived from TCL library
https://www.opensource.apple.com/source/tcl/tcl-14/tcl/license.terms
* Copyright (c) 1988-1993 The Regents of the University of California.
* Copyright (c) 1994 Sun Microsystems, Inc.
*/
#define PY_ARRAY_UNIQUE_SYMBOL UJSON_NUMPY
#define NO_IMPORT_ARRAY
#define PY_SSIZE_T_CLEAN
#include <Python.h>
#include <numpy/arrayobject.h>
#include "pandas/vendored/ujson/lib/ultrajson.h"
#define PRINTMARK()
typedef struct __PyObjectDecoder {
JSONObjectDecoder dec;
void *npyarr; // Numpy context buffer
void *npyarr_addr; // Ref to npyarr ptr to track DECREF calls
npy_intp curdim; // Current array dimension
PyArray_Descr *dtype;
} PyObjectDecoder;
typedef struct __NpyArrContext {
PyObject *ret;
PyObject *labels[2];
PyArray_Dims shape;
PyObjectDecoder *dec;
npy_intp i;
npy_intp elsize;
npy_intp elcount;
} NpyArrContext;
// Numpy handling based on numpy internal code, specifically the function
// PyArray_FromIter.
// numpy related functions are inter-dependent so declare them all here,
// to ensure the compiler catches any errors
// standard numpy array handling
JSOBJ Object_npyNewArray(void *prv, void *decoder);
JSOBJ Object_npyEndArray(void *prv, JSOBJ obj);
int Object_npyArrayAddItem(void *prv, JSOBJ obj, JSOBJ value);
// for more complex dtypes (object and string) fill a standard Python list
// and convert to a numpy array when done.
JSOBJ Object_npyNewArrayList(void *prv, void *decoder);
JSOBJ Object_npyEndArrayList(void *prv, JSOBJ obj);
int Object_npyArrayListAddItem(void *prv, JSOBJ obj, JSOBJ value);
// free the numpy context buffer
void Npy_releaseContext(NpyArrContext *npyarr) {
PRINTMARK();
if (npyarr) {
if (npyarr->shape.ptr) {
PyObject_Free(npyarr->shape.ptr);
}
if (npyarr->dec) {
npyarr->dec->npyarr = NULL;
npyarr->dec->curdim = 0;
}
Py_XDECREF(npyarr->labels[0]);
Py_XDECREF(npyarr->labels[1]);
Py_XDECREF(npyarr->ret);
PyObject_Free(npyarr);
}
}
JSOBJ Object_npyNewArray(void *prv, void *_decoder) {
NpyArrContext *npyarr;
PyObjectDecoder *decoder = (PyObjectDecoder *)_decoder;
PRINTMARK();
if (decoder->curdim <= 0) {
// start of array - initialise the context buffer
npyarr = decoder->npyarr = PyObject_Malloc(sizeof(NpyArrContext));
decoder->npyarr_addr = npyarr;
if (!npyarr) {
PyErr_NoMemory();
return NULL;
}
npyarr->dec = decoder;
npyarr->labels[0] = npyarr->labels[1] = NULL;
npyarr->shape.ptr = PyObject_Malloc(sizeof(npy_intp) * NPY_MAXDIMS);
npyarr->shape.len = 1;
npyarr->ret = NULL;
npyarr->elsize = 0;
npyarr->elcount = 4;
npyarr->i = 0;
} else {
// starting a new dimension continue the current array (and reshape
// after)
npyarr = (NpyArrContext *)decoder->npyarr;
if (decoder->curdim >= npyarr->shape.len) {
npyarr->shape.len++;
}
}
npyarr->shape.ptr[decoder->curdim] = 0;
decoder->curdim++;
return npyarr;
}
PyObject *Npy_returnLabelled(NpyArrContext *npyarr) {
PyObject *ret = npyarr->ret;
npy_intp i;
if (npyarr->labels[0] || npyarr->labels[1]) {
// finished decoding, build tuple with values and labels
ret = PyTuple_New(npyarr->shape.len + 1);
for (i = 0; i < npyarr->shape.len; i++) {
if (npyarr->labels[i]) {
PyTuple_SET_ITEM(ret, i + 1, npyarr->labels[i]);
npyarr->labels[i] = NULL;
} else {
Py_INCREF(Py_None);
PyTuple_SET_ITEM(ret, i + 1, Py_None);
}
}
PyTuple_SET_ITEM(ret, 0, npyarr->ret);
}
return ret;
}
JSOBJ Object_npyEndArray(void *prv, JSOBJ obj) {
PyObject *ret;
char *new_data;
NpyArrContext *npyarr = (NpyArrContext *)obj;
int emptyType = NPY_DEFAULT_TYPE;
npy_intp i;
PRINTMARK();
if (!npyarr) {
return NULL;
}
ret = npyarr->ret;
i = npyarr->i;
npyarr->dec->curdim--;
if (i == 0 || !npyarr->ret) {
// empty array would not have been initialised so do it now.
if (npyarr->dec->dtype) {
emptyType = npyarr->dec->dtype->type_num;
}
npyarr->ret = ret =
PyArray_EMPTY(npyarr->shape.len, npyarr->shape.ptr, emptyType, 0);
} else if (npyarr->dec->curdim <= 0) {
// realloc to final size
new_data = PyDataMem_RENEW(PyArray_DATA(ret), i * npyarr->elsize);
if (new_data == NULL) {
PyErr_NoMemory();
Npy_releaseContext(npyarr);
return NULL;
}
((PyArrayObject *)ret)->data = (void *)new_data;
// PyArray_BYTES(ret) = new_data;
}
if (npyarr->dec->curdim <= 0) {
// finished decoding array, reshape if necessary
if (npyarr->shape.len > 1) {
npyarr->ret = PyArray_Newshape((PyArrayObject *)ret, &npyarr->shape,
NPY_ANYORDER);
Py_DECREF(ret);
}
ret = Npy_returnLabelled(npyarr);
npyarr->ret = NULL;
Npy_releaseContext(npyarr);
}
return ret;
}
int Object_npyArrayAddItem(void *prv, JSOBJ obj, JSOBJ value) {
PyObject *type;
PyArray_Descr *dtype;
npy_intp i;
char *new_data, *item;
NpyArrContext *npyarr = (NpyArrContext *)obj;
PRINTMARK();
if (!npyarr) {
return 0;
}
i = npyarr->i;
npyarr->shape.ptr[npyarr->dec->curdim - 1]++;
if (PyArray_Check((PyObject *)value)) {
// multidimensional array, keep decoding values.
return 1;
}
if (!npyarr->ret) {
// Array not initialised yet.
// We do it here so we can 'sniff' the data type if none was provided
if (!npyarr->dec->dtype) {
type = PyObject_Type(value);
if (!PyArray_DescrConverter(type, &dtype)) {
Py_DECREF(type);
goto fail;
}
Py_INCREF(dtype);
Py_DECREF(type);
} else {
dtype = PyArray_DescrNew(npyarr->dec->dtype);
}
// If it's an object or string then fill a Python list and subsequently
// convert. Otherwise we would need to somehow mess about with
// reference counts when renewing memory.
npyarr->elsize = dtype->elsize;
if (PyDataType_REFCHK(dtype) || npyarr->elsize == 0) {
Py_XDECREF(dtype);
if (npyarr->dec->curdim > 1) {
PyErr_SetString(PyExc_ValueError,
"Cannot decode multidimensional arrays with "
"variable length elements to numpy");
goto fail;
}
npyarr->elcount = 0;
npyarr->ret = PyList_New(0);
if (!npyarr->ret) {
goto fail;
}
((JSONObjectDecoder *)npyarr->dec)->newArray =
Object_npyNewArrayList;
((JSONObjectDecoder *)npyarr->dec)->arrayAddItem =
Object_npyArrayListAddItem;
((JSONObjectDecoder *)npyarr->dec)->endArray =
Object_npyEndArrayList;
return Object_npyArrayListAddItem(prv, obj, value);
}
npyarr->ret = PyArray_NewFromDescr(
&PyArray_Type, dtype, 1, &npyarr->elcount, NULL, NULL, 0, NULL);
if (!npyarr->ret) {
goto fail;
}
}
if (i >= npyarr->elcount) {
// Grow PyArray_DATA(ret):
// this is similar to the strategy for PyListObject, but we use
// 50% overallocation => 0, 4, 8, 14, 23, 36, 56, 86 ...
if (npyarr->elsize == 0) {
PyErr_SetString(PyExc_ValueError,
"Cannot decode multidimensional arrays with "
"variable length elements to numpy");
goto fail;
}
npyarr->elcount = (i >> 1) + (i < 4 ? 4 : 2) + i;
if (npyarr->elcount <= NPY_MAX_INTP / npyarr->elsize) {
new_data = PyDataMem_RENEW(PyArray_DATA(npyarr->ret),
npyarr->elcount * npyarr->elsize);
} else {
PyErr_NoMemory();
goto fail;
}
((PyArrayObject *)npyarr->ret)->data = (void *)new_data;
// PyArray_BYTES(npyarr->ret) = new_data;
}
PyArray_DIMS(npyarr->ret)[0] = i + 1;
if ((item = PyArray_GETPTR1(npyarr->ret, i)) == NULL ||
PyArray_SETITEM(npyarr->ret, item, value) == -1) {
goto fail;
}
Py_DECREF((PyObject *)value);
npyarr->i++;
return 1;
fail:
Npy_releaseContext(npyarr);
return 0;
}
JSOBJ Object_npyNewArrayList(void *prv, void *_decoder) {
PyObjectDecoder *decoder = (PyObjectDecoder *)_decoder;
PRINTMARK();
PyErr_SetString(
PyExc_ValueError,
"nesting not supported for object or variable length dtypes");
Npy_releaseContext(decoder->npyarr);
return NULL;
}
JSOBJ Object_npyEndArrayList(void *prv, JSOBJ obj) {
PyObject *list, *ret;
NpyArrContext *npyarr = (NpyArrContext *)obj;
PRINTMARK();
if (!npyarr) {
return NULL;
}
// convert decoded list to numpy array
list = (PyObject *)npyarr->ret;
npyarr->ret = PyArray_FROM_O(list);
ret = Npy_returnLabelled(npyarr);
npyarr->ret = list;
((JSONObjectDecoder *)npyarr->dec)->newArray = Object_npyNewArray;
((JSONObjectDecoder *)npyarr->dec)->arrayAddItem = Object_npyArrayAddItem;
((JSONObjectDecoder *)npyarr->dec)->endArray = Object_npyEndArray;
Npy_releaseContext(npyarr);
return ret;
}
int Object_npyArrayListAddItem(void *prv, JSOBJ obj, JSOBJ value) {
NpyArrContext *npyarr = (NpyArrContext *)obj;
PRINTMARK();
if (!npyarr) {
return 0;
}
PyList_Append((PyObject *)npyarr->ret, value);
Py_DECREF((PyObject *)value);
npyarr->elcount++;
return 1;
}
int Object_objectAddKey(void *prv, JSOBJ obj, JSOBJ name, JSOBJ value) {
int ret = PyDict_SetItem(obj, name, value);
Py_DECREF((PyObject *)name);
Py_DECREF((PyObject *)value);
return ret == 0 ? 1 : 0;
}
int Object_arrayAddItem(void *prv, JSOBJ obj, JSOBJ value) {
int ret = PyList_Append(obj, value);
Py_DECREF((PyObject *)value);
return ret == 0 ? 1 : 0;
}
JSOBJ Object_newString(void *prv, wchar_t *start, wchar_t *end) {
return PyUnicode_FromWideChar(start, (end - start));
}
JSOBJ Object_newTrue(void *prv) { Py_RETURN_TRUE; }
JSOBJ Object_newFalse(void *prv) { Py_RETURN_FALSE; }
JSOBJ Object_newNull(void *prv) { Py_RETURN_NONE; }
JSOBJ Object_newPosInf(void *prv) { return PyFloat_FromDouble(Py_HUGE_VAL); }
JSOBJ Object_newNegInf(void *prv) { return PyFloat_FromDouble(-Py_HUGE_VAL); }
JSOBJ Object_newObject(void *prv, void *decoder) { return PyDict_New(); }
JSOBJ Object_endObject(void *prv, JSOBJ obj) { return obj; }
JSOBJ Object_newArray(void *prv, void *decoder) { return PyList_New(0); }
JSOBJ Object_endArray(void *prv, JSOBJ obj) { return obj; }
JSOBJ Object_newInteger(void *prv, JSINT32 value) {
return PyLong_FromLong((long)value);
}
JSOBJ Object_newLong(void *prv, JSINT64 value) {
return PyLong_FromLongLong(value);
}
JSOBJ Object_newUnsignedLong(void *prv, JSUINT64 value) {
return PyLong_FromUnsignedLongLong(value);
}
JSOBJ Object_newDouble(void *prv, double value) {
return PyFloat_FromDouble(value);
}
static void Object_releaseObject(void *prv, JSOBJ obj, void *_decoder) {
PyObjectDecoder *decoder = (PyObjectDecoder *)_decoder;
if (obj != decoder->npyarr_addr) {
Py_XDECREF(((PyObject *)obj));
}
}
static char *g_kwlist[] = {"obj", "precise_float",
"labelled", "dtype", NULL};
PyObject *JSONToObj(PyObject *self, PyObject *args, PyObject *kwargs) {
PyObject *ret;
PyObject *sarg;
PyObject *arg;
PyObject *opreciseFloat = NULL;
JSONObjectDecoder *decoder;
PyObjectDecoder pyDecoder;
PyArray_Descr *dtype = NULL;
int labelled = 0;
JSONObjectDecoder dec = {
Object_newString, Object_objectAddKey, Object_arrayAddItem,
Object_newTrue, Object_newFalse, Object_newNull,
Object_newPosInf, Object_newNegInf, Object_newObject,
Object_endObject, Object_newArray, Object_endArray,
Object_newInteger, Object_newLong, Object_newUnsignedLong,
Object_newDouble,
Object_releaseObject, PyObject_Malloc, PyObject_Free,
PyObject_Realloc};
dec.preciseFloat = 0;
dec.prv = NULL;
pyDecoder.dec = dec;
pyDecoder.curdim = 0;
pyDecoder.npyarr = NULL;
pyDecoder.npyarr_addr = NULL;
decoder = (JSONObjectDecoder *)&pyDecoder;
if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|OiiO&", g_kwlist, &arg,
&opreciseFloat, &labelled,
PyArray_DescrConverter2, &dtype)) {
Npy_releaseContext(pyDecoder.npyarr);
return NULL;
}
if (opreciseFloat && PyObject_IsTrue(opreciseFloat)) {
decoder->preciseFloat = 1;
}
if (PyBytes_Check(arg)) {
sarg = arg;
} else if (PyUnicode_Check(arg)) {
sarg = PyUnicode_AsUTF8String(arg);
if (sarg == NULL) {
// Exception raised above us by codec according to docs
return NULL;
}
} else {
PyErr_Format(PyExc_TypeError, "Expected 'str' or 'bytes'");
return NULL;
}
decoder->errorStr = NULL;
decoder->errorOffset = NULL;
ret = JSON_DecodeObject(decoder, PyBytes_AS_STRING(sarg),
PyBytes_GET_SIZE(sarg));
if (sarg != arg) {
Py_DECREF(sarg);
}
if (PyErr_Occurred()) {
if (ret) {
Py_DECREF((PyObject *)ret);
}
Npy_releaseContext(pyDecoder.npyarr);
return NULL;
}
if (decoder->errorStr) {
/*
FIXME: It's possible to give a much nicer error message here with actual
failing element in input etc*/
PyErr_Format(PyExc_ValueError, "%s", decoder->errorStr);
if (ret) {
Py_DECREF((PyObject *)ret);
}
Npy_releaseContext(pyDecoder.npyarr);
return NULL;
}
return ret;
}


@@ -0,0 +1,446 @@
/*
Copyright (c) 2011-2013, ESN Social Software AB and Jonas Tarnstrom
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the ESN Social Software AB nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL ESN SOCIAL SOFTWARE AB OR JONAS TARNSTROM BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Portions of code from MODP_ASCII - Ascii transformations (upper/lower, etc)
https://github.com/client9/stringencoders
Copyright (c) 2007 Nick Galbreath -- nickg [at] modp [dot] com. All rights reserved.
Numeric decoder derived from TCL library
https://www.opensource.apple.com/source/tcl/tcl-14/tcl/license.terms
* Copyright (c) 1988-1993 The Regents of the University of California.
* Copyright (c) 1994 Sun Microsystems, Inc.
*/
#include "pandas/vendored/ujson/python/version.h"
#define PY_SSIZE_T_CLEAN
#include <Python.h>
#define PY_ARRAY_UNIQUE_SYMBOL UJSON_NUMPY
#include "numpy/arrayobject.h"
/* objToJSON */
PyObject *objToJSON(PyObject *self, PyObject *args, PyObject *kwargs);
void *initObjToJSON(void);
/* JSONToObj */
PyObject *JSONToObj(PyObject *self, PyObject *args, PyObject *kwargs);
#define ENCODER_HELP_TEXT \
"Use ensure_ascii=false to output UTF-8. Pass in double_precision to " \
"alter the maximum digit precision of doubles. Set " \
"encode_html_chars=True to encode < > & as unicode escape sequences."
static PyMethodDef ujsonMethods[] = {
{"ujson_dumps", (PyCFunction)objToJSON, METH_VARARGS | METH_KEYWORDS,
"Converts arbitrary object recursively into JSON. " ENCODER_HELP_TEXT},
{"ujson_loads", (PyCFunction)JSONToObj, METH_VARARGS | METH_KEYWORDS,
"Converts JSON as string to dict object structure. Use precise_float=True "
"to use high precision float decoder."},
{NULL, NULL, 0, NULL} /* Sentinel */
};
typedef struct {
PyObject *type_decimal;
PyObject *type_dataframe;
PyObject *type_series;
PyObject *type_index;
PyObject *type_nat;
PyObject *type_na;
} modulestate;
#define modulestate(o) ((modulestate *)PyModule_GetState(o))
static int module_traverse(PyObject *m, visitproc visit, void *arg);
static int module_clear(PyObject *m);
static void module_free(void *module);
static struct PyModuleDef moduledef = {.m_base = PyModuleDef_HEAD_INIT,
.m_name = "pandas._libs.json",
.m_methods = ujsonMethods,
.m_size = sizeof(modulestate),
.m_traverse = module_traverse,
.m_clear = module_clear,
.m_free = module_free};
#ifndef PYPY_VERSION
/* Used in objToJSON.c */
int object_is_decimal_type(PyObject *obj) {
PyObject *module = PyState_FindModule(&moduledef);
if (module == NULL)
return 0;
modulestate *state = modulestate(module);
if (state == NULL)
return 0;
PyObject *type_decimal = state->type_decimal;
if (type_decimal == NULL) {
PyErr_Clear();
return 0;
}
int result = PyObject_IsInstance(obj, type_decimal);
if (result == -1) {
PyErr_Clear();
return 0;
}
return result;
}
int object_is_dataframe_type(PyObject *obj) {
PyObject *module = PyState_FindModule(&moduledef);
if (module == NULL)
return 0;
modulestate *state = modulestate(module);
if (state == NULL)
return 0;
PyObject *type_dataframe = state->type_dataframe;
if (type_dataframe == NULL) {
PyErr_Clear();
return 0;
}
int result = PyObject_IsInstance(obj, type_dataframe);
if (result == -1) {
PyErr_Clear();
return 0;
}
return result;
}
int object_is_series_type(PyObject *obj) {
PyObject *module = PyState_FindModule(&moduledef);
if (module == NULL)
return 0;
modulestate *state = modulestate(module);
if (state == NULL)
return 0;
PyObject *type_series = state->type_series;
if (type_series == NULL) {
PyErr_Clear();
return 0;
}
int result = PyObject_IsInstance(obj, type_series);
if (result == -1) {
PyErr_Clear();
return 0;
}
return result;
}
int object_is_index_type(PyObject *obj) {
PyObject *module = PyState_FindModule(&moduledef);
if (module == NULL)
return 0;
modulestate *state = modulestate(module);
if (state == NULL)
return 0;
PyObject *type_index = state->type_index;
if (type_index == NULL) {
PyErr_Clear();
return 0;
}
int result = PyObject_IsInstance(obj, type_index);
if (result == -1) {
PyErr_Clear();
return 0;
}
return result;
}
int object_is_nat_type(PyObject *obj) {
PyObject *module = PyState_FindModule(&moduledef);
if (module == NULL)
return 0;
modulestate *state = modulestate(module);
if (state == NULL)
return 0;
PyObject *type_nat = state->type_nat;
if (type_nat == NULL) {
PyErr_Clear();
return 0;
}
int result = PyObject_IsInstance(obj, type_nat);
if (result == -1) {
PyErr_Clear();
return 0;
}
return result;
}
int object_is_na_type(PyObject *obj) {
PyObject *module = PyState_FindModule(&moduledef);
if (module == NULL)
return 0;
modulestate *state = modulestate(module);
if (state == NULL)
return 0;
PyObject *type_na = state->type_na;
if (type_na == NULL) {
PyErr_Clear();
return 0;
}
int result = PyObject_IsInstance(obj, type_na);
if (result == -1) {
PyErr_Clear();
return 0;
}
return result;
}
#else
/* Used in objToJSON.c */
int object_is_decimal_type(PyObject *obj) {
PyObject *module = PyImport_ImportModule("decimal");
if (module == NULL) {
PyErr_Clear();
return 0;
}
PyObject *type_decimal = PyObject_GetAttrString(module, "Decimal");
if (type_decimal == NULL) {
Py_DECREF(module);
PyErr_Clear();
return 0;
}
int result = PyObject_IsInstance(obj, type_decimal);
if (result == -1) {
Py_DECREF(module);
Py_DECREF(type_decimal);
PyErr_Clear();
return 0;
}
return result;
}
int object_is_dataframe_type(PyObject *obj) {
PyObject *module = PyImport_ImportModule("pandas");
if (module == NULL) {
PyErr_Clear();
return 0;
}
PyObject *type_dataframe = PyObject_GetAttrString(module, "DataFrame");
if (type_dataframe == NULL) {
Py_DECREF(module);
PyErr_Clear();
return 0;
}
int result = PyObject_IsInstance(obj, type_dataframe);
if (result == -1) {
Py_DECREF(module);
Py_DECREF(type_dataframe);
PyErr_Clear();
return 0;
}
return result;
}
int object_is_series_type(PyObject *obj) {
PyObject *module = PyImport_ImportModule("pandas");
if (module == NULL) {
PyErr_Clear();
return 0;
}
PyObject *type_series = PyObject_GetAttrString(module, "Series");
if (type_series == NULL) {
Py_DECREF(module);
PyErr_Clear();
return 0;
}
int result = PyObject_IsInstance(obj, type_series);
if (result == -1) {
Py_DECREF(module);
Py_DECREF(type_series);
PyErr_Clear();
return 0;
}
return result;
}
int object_is_index_type(PyObject *obj) {
PyObject *module = PyImport_ImportModule("pandas");
if (module == NULL) {
PyErr_Clear();
return 0;
}
PyObject *type_index = PyObject_GetAttrString(module, "Index");
if (type_index == NULL) {
Py_DECREF(module);
PyErr_Clear();
return 0;
}
int result = PyObject_IsInstance(obj, type_index);
if (result == -1) {
Py_DECREF(module);
Py_DECREF(type_index);
PyErr_Clear();
return 0;
}
return result;
}
int object_is_nat_type(PyObject *obj) {
PyObject *module = PyImport_ImportModule("pandas._libs.tslibs.nattype");
if (module == NULL) {
PyErr_Clear();
return 0;
}
PyObject *type_nat = PyObject_GetAttrString(module, "NaTType");
if (type_nat == NULL) {
Py_DECREF(module);
PyErr_Clear();
return 0;
}
int result = PyObject_IsInstance(obj, type_nat);
if (result == -1) {
Py_DECREF(module);
Py_DECREF(type_nat);
PyErr_Clear();
return 0;
}
return result;
}
int object_is_na_type(PyObject *obj) {
PyObject *module = PyImport_ImportModule("pandas._libs.missing");
if (module == NULL) {
PyErr_Clear();
return 0;
}
PyObject *type_na = PyObject_GetAttrString(module, "NAType");
if (type_na == NULL) {
Py_DECREF(module);
PyErr_Clear();
return 0;
}
int result = PyObject_IsInstance(obj, type_na);
if (result == -1) {
Py_DECREF(module);
Py_DECREF(type_na);
PyErr_Clear();
return 0;
}
return result;
}
#endif
static int module_traverse(PyObject *m, visitproc visit, void *arg) {
Py_VISIT(modulestate(m)->type_decimal);
Py_VISIT(modulestate(m)->type_dataframe);
Py_VISIT(modulestate(m)->type_series);
Py_VISIT(modulestate(m)->type_index);
Py_VISIT(modulestate(m)->type_nat);
Py_VISIT(modulestate(m)->type_na);
return 0;
}
static int module_clear(PyObject *m) {
Py_CLEAR(modulestate(m)->type_decimal);
Py_CLEAR(modulestate(m)->type_dataframe);
Py_CLEAR(modulestate(m)->type_series);
Py_CLEAR(modulestate(m)->type_index);
Py_CLEAR(modulestate(m)->type_nat);
Py_CLEAR(modulestate(m)->type_na);
return 0;
}
static void module_free(void *module) { module_clear((PyObject *)module); }
PyMODINIT_FUNC PyInit_json(void) {
import_array()
PyObject *module;
#ifndef PYPY_VERSION
// This function is not supported in PyPy.
if ((module = PyState_FindModule(&moduledef)) != NULL) {
Py_INCREF(module);
return module;
}
#endif
module = PyModule_Create(&moduledef);
if (module == NULL) {
return NULL;
}
#ifndef PYPY_VERSION
PyObject *mod_decimal = PyImport_ImportModule("decimal");
if (mod_decimal) {
PyObject *type_decimal = PyObject_GetAttrString(mod_decimal, "Decimal");
assert(type_decimal != NULL);
modulestate(module)->type_decimal = type_decimal;
Py_DECREF(mod_decimal);
}
PyObject *mod_pandas = PyImport_ImportModule("pandas");
if (mod_pandas) {
PyObject *type_dataframe =
PyObject_GetAttrString(mod_pandas, "DataFrame");
assert(type_dataframe != NULL);
modulestate(module)->type_dataframe = type_dataframe;
PyObject *type_series = PyObject_GetAttrString(mod_pandas, "Series");
assert(type_series != NULL);
modulestate(module)->type_series = type_series;
PyObject *type_index = PyObject_GetAttrString(mod_pandas, "Index");
assert(type_index != NULL);
modulestate(module)->type_index = type_index;
Py_DECREF(mod_pandas);
}
PyObject *mod_nattype =
PyImport_ImportModule("pandas._libs.tslibs.nattype");
if (mod_nattype) {
PyObject *type_nat = PyObject_GetAttrString(mod_nattype, "NaTType");
assert(type_nat != NULL);
modulestate(module)->type_nat = type_nat;
Py_DECREF(mod_nattype);
}
PyObject *mod_natype = PyImport_ImportModule("pandas._libs.missing");
if (mod_natype) {
PyObject *type_na = PyObject_GetAttrString(mod_natype, "NAType");
assert(type_na != NULL);
modulestate(module)->type_na = type_na;
Py_DECREF(mod_natype);
} else {
PyErr_Clear();
}
#endif
/* Not vendored for now
JSONDecodeError = PyErr_NewException("ujson.JSONDecodeError",
PyExc_ValueError, NULL); Py_XINCREF(JSONDecodeError); if
(PyModule_AddObject(module, "JSONDecodeError", JSONDecodeError) < 0)
{
Py_XDECREF(JSONDecodeError);
Py_CLEAR(JSONDecodeError);
Py_DECREF(module);
return NULL;
}
*/
return module;
}