1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
|
"""
pint.compat
~~~~~~~~~~~
Compatibility layer.
:copyright: 2013 by Pint Authors, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import annotations
import math
import tokenize
from decimal import Decimal
from importlib import import_module
from io import BytesIO
from numbers import Number
from collections.abc import Mapping
from typing import Any, NoReturn, Callable
from collections.abc import Generator, Iterable
def missing_dependency(
    package: str, display_name: str | None = None
) -> Callable[..., NoReturn]:
    """Build a stand-in callable that raises when invoked.

    This allows a missing optional dependency to go unnoticed until the
    feature that actually needs it is used.

    Parameters
    ----------
    package
        pip name of the missing package.
    display_name
        Human-readable name used in the error message; defaults to ``package``.
    """
    shown = display_name or package

    def _inner(*args: Any, **kwargs: Any) -> NoReturn:
        # The message tells the user exactly how to fix the problem.
        raise Exception(
            f"This feature requires {shown}. Please install it by running:\n"
            f"pip install {package}"
        )

    return _inner
def tokenizer(input_string: str) -> Generator[tokenize.TokenInfo, None, None]:
    """Yield the tokens of *input_string*, skipping the leading ENCODING token.

    The string is encoded as UTF-8 before being handed to the stdlib
    tokenizer.

    See Also
    --------
    tokenize.tokenize
    """
    stream = BytesIO(input_string.encode("utf-8"))
    yield from (
        tok
        for tok in tokenize.tokenize(stream.readline)
        if tok.type != tokenize.ENCODING
    )
# TODO: remove this warning after v0.10
class BehaviorChangeWarning(UserWarning):
    """Warning category used to signal a change in pint's behavior
    (scheduled for removal after v0.10 per the TODO above)."""

    pass
# Guarded NumPy import: defines array-aware helpers and flags when NumPy
# is available, and degraded placeholders when it is not.
try:
    import numpy as np
    from numpy import datetime64 as np_datetime64
    from numpy import ndarray

    HAS_NUMPY = True
    NUMPY_VER = np.__version__
    # Types accepted as Quantity magnitudes when NumPy is present.
    NUMERIC_TYPES = (Number, Decimal, ndarray, np.number)

    def _to_magnitude(value, force_ndarray=False, force_ndarray_like=False):
        """Validate and convert a raw value into a Quantity magnitude.

        Rejects dicts, booleans, None and the empty string; converts
        lists/tuples to ndarrays; optionally coerces everything (or every
        non-duck-array) to an ndarray.
        """
        if isinstance(value, (dict, bool)) or value is None:
            raise TypeError(f"Invalid magnitude for Quantity: {value!r}")
        elif isinstance(value, str) and value == "":
            raise ValueError("Quantity magnitude cannot be an empty string.")
        elif isinstance(value, (list, tuple)):
            return np.asarray(value)
        if force_ndarray or (
            force_ndarray_like and not is_duck_array_type(type(value))
        ):
            return np.asarray(value)
        return value

    def _test_array_function_protocol():
        # Test if the __array_function__ protocol is enabled
        try:

            class FakeArray:
                def __array_function__(self, *args, **kwargs):
                    return

            # If the protocol is active, NumPy dispatches to FakeArray and
            # this succeeds; otherwise NumPy raises trying to coerce it.
            np.concatenate([FakeArray()])
            return True
        except ValueError:
            return False

    HAS_NUMPY_ARRAY_FUNCTION = _test_array_function_protocol()
    NP_NO_VALUE = np._NoValue
except ImportError:
    # NumPy is not installed: placeholder types keep isinstance/issubclass
    # checks working, and the magnitude converter rejects array-like input.
    np = None

    class ndarray:
        pass

    class np_datetime64:
        pass

    HAS_NUMPY = False
    NUMPY_VER = "0"
    NUMERIC_TYPES = (Number, Decimal)
    HAS_NUMPY_ARRAY_FUNCTION = False
    NP_NO_VALUE = None

    def _to_magnitude(value, force_ndarray=False, force_ndarray_like=False):
        """Validate a raw value as a Quantity magnitude (NumPy-less variant).

        Same validation as the NumPy version, but lists/tuples and the
        ``force_*`` flags are errors because there is no ndarray to build.
        """
        if force_ndarray or force_ndarray_like:
            raise ValueError(
                "Cannot force to ndarray or ndarray-like when NumPy is not present."
            )
        elif isinstance(value, (dict, bool)) or value is None:
            raise TypeError(f"Invalid magnitude for Quantity: {value!r}")
        elif isinstance(value, str) and value == "":
            raise ValueError("Quantity magnitude cannot be an empty string.")
        elif isinstance(value, (list, tuple)):
            raise TypeError(
                "lists and tuples are valid magnitudes for "
                "Quantity only when NumPy is present."
            )
        return value
# Optional dependency: uncertainties (error-propagating floats).
try:
    from uncertainties import ufloat

    HAS_UNCERTAINTIES = True
except ImportError:
    ufloat = None
    HAS_UNCERTAINTIES = False
# Optional dependency: Babel, used for localized unit formatting.
try:
    from babel import Locale as Loc
    from babel import units as babel_units

    babel_parse = Loc.parse
    # Require a Babel recent enough to provide ``format_unit``.
    HAS_BABEL = hasattr(babel_units, "format_unit")
except ImportError:
    # babel_parse/babel_units fallbacks are installed further below.
    HAS_BABEL = False
# Optional dependency: mip (mixed-integer programming solver).
# Re-export the handful of mip names pint uses under stable aliases.
try:
    import mip

    mip_model = mip.model
    mip_Model = mip.Model
    mip_INF = mip.INF
    mip_INTEGER = mip.INTEGER
    mip_xsum = mip.xsum
    mip_OptimizationStatus = mip.OptimizationStatus
    HAS_MIP = True
except ImportError:
    # Aliases are replaced with missing-dependency stubs further below.
    HAS_MIP = False
# Defines Logarithm and Exponential for Logarithmic Converter
# NumPy's exp/log work elementwise on arrays; math's are scalar-only.
if HAS_NUMPY:
    from numpy import exp  # noqa: F401
    from numpy import log  # noqa: F401
else:
    from math import exp  # noqa: F401
    from math import log  # noqa: F401
if not HAS_BABEL:
    # Replace the Babel entry points with callables that raise a helpful
    # "please install Babel" error only when actually used.
    babel_parse = missing_dependency("Babel")  # noqa: F811
    babel_units = babel_parse
if not HAS_MIP:
    # Replace every mip alias with a callable that raises a helpful
    # "please install mip" error only when actually used.
    mip_missing = missing_dependency("mip")
    mip_model = mip_missing
    mip_Model = mip_missing
    mip_INF = mip_missing
    mip_INTEGER = mip_missing
    mip_xsum = mip_missing
    mip_OptimizationStatus = mip_missing
# Define location of pint.Quantity in NEP-13 type cast hierarchy by defining upcast
# types using guarded imports
try:
    from dask import array as dask_array
    from dask.base import compute, persist, visualize
except ImportError:
    # Without dask, the helpers and the array module are simply None.
    compute, persist, visualize = None, None, None
    dask_array = None
# TODO: merge with upcast_type_map
#: Names of types that should "win" over Quantity in binary operations
#: (Quantity defers to them instead of wrapping them).
#: Note: the duplicate "xarray.core.dataarray.DataArray" entry was removed.
upcast_type_names = (
    "pint_pandas.PintArray",
    "pandas.Series",
    "xarray.core.dataarray.DataArray",
    "xarray.core.dataset.Dataset",
    "xarray.core.variable.Variable",
    "pandas.core.series.Series",
)

#: Map type name to the actual type (for upcast types).
#: Values start as None and are filled in lazily by check_upcast_type.
upcast_type_map: Mapping[str, type | None] = {k: None for k in upcast_type_names}
def fully_qualified_name(t: type) -> str:
    """Return ``module.qualname`` for *t*, or the bare qualname for builtins."""
    qualname = t.__qualname__
    mod = t.__module__
    # Builtins (and types reporting no module) are addressed by bare name.
    return qualname if mod in (None, "builtins") else f"{mod}.{qualname}"
def check_upcast_type(obj: type) -> bool:
    """Check if the type object is an upcast type, importing it on demand."""
    # TODO: merge or unify name with is_upcast_type
    fqn = fully_qualified_name(obj)
    if fqn not in upcast_type_map:
        return False
    mod_name, _, cls_name = fqn.rpartition(".")
    # Import and cache the real class for future identity checks.
    upcast_type_map[fqn] = getattr(import_module(mod_name), cls_name)
    # This is to check we are importing the same thing.
    # and avoid weird problems. Maybe instead of return
    # we should raise an error if false.
    return obj in upcast_type_map.values()
def is_upcast_type(other: type) -> bool:
    """Check if the type object is an upcast type."""
    # TODO: merge or unify name with check_upcast_type
    # Fast path first: the type may already be imported and cached.
    return other in upcast_type_map.values() or check_upcast_type(other)
def is_duck_array_type(cls: type) -> bool:
    """Check if the type object represents a (non-Quantity) duck array type."""
    # TODO (NEP 30): replace duck array check with hasattr(other, "__duckarray__")
    if issubclass(cls, ndarray):
        return True
    # pint Quantity(-like) classes carry _magnitude/_units and are excluded.
    if hasattr(cls, "_magnitude") or hasattr(cls, "_units"):
        return False
    if not HAS_NUMPY_ARRAY_FUNCTION:
        return False
    # A type quacks like an array when it speaks the __array_function__
    # protocol and exposes the basic array attributes.
    return all(
        hasattr(cls, attr) for attr in ("__array_function__", "ndim", "dtype")
    )
def is_duck_array(obj: Any) -> bool:
    """Check if an object (an instance, not a type) is a non-Quantity duck array.

    Fix: the parameter was annotated ``obj: type``, but the function takes an
    instance — ``type(obj)`` is applied here before delegating.
    """
    return is_duck_array_type(type(obj))
def eq(lhs: Any, rhs: Any, check_all: bool) -> bool | Iterable[bool]:
    """Comparison of scalars and arrays.

    Parameters
    ----------
    lhs
        left-hand side
    rhs
        right-hand side
    check_all
        if True, reduce sequence to single bool;
        return True if all the elements are equal.

    Returns
    -------
    bool or array_like of bool
    """
    result = lhs == rhs
    # Collapse an elementwise comparison to a single bool when requested.
    if check_all and is_duck_array_type(type(result)):
        result = result.all()
    return result
def isnan(obj: Any, check_all: bool) -> bool | Iterable[bool]:
    """Test for NaN or NaT.

    Parameters
    ----------
    obj
        scalar or vector
    check_all
        if True, reduce sequence to single bool;
        return True if any of the elements are NaN.

    Returns
    -------
    bool or array_like of bool.
    Always return False for non-numeric types.
    """
    if is_duck_array_type(type(obj)):
        if obj.dtype.kind in "if":
            # Integer or float arrays: NaN test (integers are never NaN,
            # but np.isnan handles both kinds uniformly).
            out = np.isnan(obj)
        elif obj.dtype.kind in "Mm":
            # datetime64 / timedelta64 arrays: NaT test.
            out = np.isnat(obj)
        else:
            # Not a numeric or datetime type
            out = np.full(obj.shape, False)
        return out.any() if check_all else out
    if isinstance(obj, np_datetime64):
        # Scalar NumPy datetime: NaT test.
        return np.isnat(obj)
    try:
        return math.isnan(obj)
    except TypeError:
        # Non-numeric scalar: by convention, not NaN.
        return False
def zero_or_nan(obj: Any, check_all: bool) -> bool | Iterable[bool]:
    """Test if obj is zero, NaN, or NaT.

    Parameters
    ----------
    obj
        scalar or vector
    check_all
        if True, reduce sequence to single bool;
        return True if all the elements are zero, NaN, or NaT.

    Returns
    -------
    bool or array_like of bool.
    Always return False for non-numeric types.
    """
    # ``+`` acts as elementwise OR for boolean arrays while still combining
    # plain bools, matching the mixed return types of eq/isnan.
    combined = eq(obj, 0, False) + isnan(obj, False)
    if check_all and is_duck_array_type(type(combined)):
        combined = combined.all()
    return combined
|