
Commit 81be544

rename internal modules
1 parent addefa0 commit 81be544

24 files changed: +257 -264 lines changed
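The hunks below all apply the same renaming convention: Cython extension modules imported from pandas.libs gain a "lib" prefix (algos becomes libalgos, tslib becomes libts, index becomes libindex), while the pure-Python pandas.core.algorithms module is imported under its full name, algorithms, instead of the abbreviation algos. A minimal before/after sketch of the import pattern (these imports only resolve inside the pandas source tree at this revision, not against a released pandas):

# Before this commit (the "-" import lines in the hunks below);
# only resolves in-tree at the parent revision addefa0.
import pandas.core.algorithms as algos
from pandas.libs import lib, tslib, algos as _algos

# After this commit (the "+" import lines in the hunks below).
import pandas.core.algorithms as algorithms
from pandas.libs import lib, libts, libalgos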

pandas/core/categorical.py
+3 -3

@@ -6,7 +6,7 @@

 from pandas import compat
 from pandas.compat import u, lzip
-from pandas.libs import lib, algos as _algos
+from pandas.libs import lib, libalgos

 from pandas.types.generic import ABCSeries, ABCIndexClass, ABCCategoricalIndex
 from pandas.types.missing import isnull, notnull
@@ -1817,8 +1817,8 @@ def _reverse_indexer(self):

         """
         categories = self.categories
-        r, counts = _algos.groupsort_indexer(self.codes.astype('int64'),
-                                             categories.size)
+        r, counts = libalgos.groupsort_indexer(self.codes.astype('int64'),
+                                               categories.size)
         counts = counts.cumsum()
         result = [r[counts[indexer]:counts[indexer + 1]]
                   for indexer in range(len(counts) - 1)]
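Only the import and the call site change here; the logic of _reverse_indexer is untouched. As a quick illustration of what the renamed libalgos.groupsort_indexer call plus counts.cumsum() produce, here is a pure-NumPy sketch of the same idea (illustrative only, with made-up inputs; this is not the Cython routine itself):

import numpy as np

# Example codes for categories ['a', 'b', 'c']; -1 marks a missing value.
codes = np.array([1, 0, 1, 2, 0, -1], dtype=np.int64)
categories = np.array(['a', 'b', 'c'])

# Stable sort of positions by code; missing (-1) sorts first, mirroring the
# NA slot the Cython indexer reserves at the front of its counts array.
r = np.argsort(codes, kind='stable')
counts = np.bincount(codes + 1, minlength=len(categories) + 1).cumsum()

# Slice out the original positions belonging to each category.
reverse = {cat: r[counts[i]:counts[i + 1]] for i, cat in enumerate(categories)}
print(reverse)  # {'a': array([1, 4]), 'b': array([0, 2]), 'c': array([3])}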

pandas/core/frame.py
+14 -15

@@ -71,7 +71,7 @@
 from pandas.core.series import Series
 from pandas.core.categorical import Categorical
 import pandas.computation.expressions as expressions
-import pandas.core.algorithms as algos
+import pandas.core.algorithms as algorithms
 from pandas.computation.eval import eval as _eval
 from pandas.compat import (range, map, zip, lrange, lmap, lzip, StringIO, u,
                            OrderedDict, raise_with_traceback)
@@ -93,7 +93,7 @@
 from pandas.formats.printing import pprint_thing
 import pandas.tools.plotting as gfx

-from pandas.libs import lib, algos as _algos
+from pandas.libs import lib, libalgos

 from pandas.core.config import get_option

@@ -2775,8 +2775,8 @@ def _reindex_multi(self, axes, copy, fill_value):

         if row_indexer is not None and col_indexer is not None:
             indexer = row_indexer, col_indexer
-            new_values = algos.take_2d_multi(self.values, indexer,
-                                             fill_value=fill_value)
+            new_values = algorithms.take_2d_multi(self.values, indexer,
+                                                  fill_value=fill_value)
             return self._constructor(new_values, index=new_index,
                                      columns=new_columns)
         else:
@@ -3164,9 +3164,8 @@ def duplicated(self, subset=None, keep='first'):
        from pandas.libs.hashtable import duplicated_int64, _SIZE_HINT_LIMIT

        def f(vals):
-            labels, shape = algos.factorize(vals,
-                                            size_hint=min(len(self),
-                                                          _SIZE_HINT_LIMIT))
+            labels, shape = algorithms.factorize(
+                vals, size_hint=min(len(self), _SIZE_HINT_LIMIT))
            return labels.astype('i8', copy=False), len(shape)

        if subset is None:
@@ -3418,7 +3417,7 @@ def nlargest(self, n, columns, keep='first'):
        1 10 b 2
        2 8 d NaN
        """
-        return algos.select_n_frame(self, columns, n, 'nlargest', keep)
+        return algorithms.select_n_frame(self, columns, n, 'nlargest', keep)

     def nsmallest(self, n, columns, keep='first'):
         """Get the rows of a DataFrame sorted by the `n` smallest
@@ -3452,7 +3451,7 @@ def nsmallest(self, n, columns, keep='first'):
        0 1 a 1
        2 8 d NaN
        """
-        return algos.select_n_frame(self, columns, n, 'nsmallest', keep)
+        return algorithms.select_n_frame(self, columns, n, 'nsmallest', keep)

     def swaplevel(self, i=-2, j=-1, axis=0):
         """
@@ -4720,10 +4719,10 @@ def corr(self, method='pearson', min_periods=1):
         mat = numeric_df.values

         if method == 'pearson':
-            correl = _algos.nancorr(_ensure_float64(mat), minp=min_periods)
+            correl = libalgos.nancorr(_ensure_float64(mat), minp=min_periods)
         elif method == 'spearman':
-            correl = _algos.nancorr_spearman(_ensure_float64(mat),
-                                             minp=min_periods)
+            correl = libalgos.nancorr_spearman(_ensure_float64(mat),
+                                               minp=min_periods)
         else:
             if min_periods is None:
                 min_periods = 1
@@ -4783,8 +4782,8 @@ def cov(self, min_periods=None):
             baseCov = np.cov(mat.T)
             baseCov = baseCov.reshape((len(cols), len(cols)))
         else:
-            baseCov = _algos.nancorr(_ensure_float64(mat), cov=True,
-                                     minp=min_periods)
+            baseCov = libalgos.nancorr(_ensure_float64(mat), cov=True,
+                                       minp=min_periods)

         return self._constructor(baseCov, index=idx, columns=cols)

@@ -5650,7 +5649,7 @@ def _list_of_series_to_arrays(data, columns, coerce_float=False, dtype=None):
            indexer = indexer_cache[id(index)] = index.get_indexer(columns)

        values = _values_from_object(s)
-        aligned_values.append(algos.take_1d(values, indexer))
+        aligned_values.append(algorithms.take_1d(values, indexer))

    values = np.vstack(aligned_values)
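The duplicated() hunk above reflows the factorize call while renaming the module; the underlying approach (factorize each column, combine the per-column integer labels into one key, then deduplicate the keys) is unchanged. A loose public-API analogue of that idea, for illustration only (the real code uses pandas.libs.hashtable.duplicated_int64 and a size hint):

import numpy as np
import pandas as pd

df = pd.DataFrame({'x': [1, 2, 1, 2], 'y': ['a', 'b', 'a', 'c']})

# Factorize each column to integer labels and record the number of uniques.
labels, shapes = zip(*(
    (lab.astype('i8'), len(uniq))
    for lab, uniq in (pd.factorize(df[c]) for c in df.columns)
))

# Combine per-column labels into a single int64 key (mixed-radix encoding).
keys = np.zeros(len(df), dtype='i8')
for lab, size in zip(labels, shapes):
    keys = keys * size + lab

# Duplicates of the combined key mark duplicated rows.
print(pd.Series(keys).duplicated(keep='first').values)  # [False False  True False]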

pandas/core/groupby.py
+20 -20

@@ -55,11 +55,11 @@
 from pandas.formats.printing import pprint_thing
 from pandas.util.validators import validate_kwargs

-import pandas.core.algorithms as algos
+import pandas.core.algorithms as algorithms
 import pandas.core.common as com
 from pandas.core.config import option_context

-from pandas.libs import lib, tslib, algos as _algos
+from pandas.libs import lib, libts, libalgos, NaT, iNaT
 from pandas.libs.lib import Timestamp, count_level_2d

 _doc_template = """
@@ -1473,11 +1473,11 @@ def shift(self, periods=1, freq=None, axis=0):

         # filled in by Cython
         indexer = np.zeros_like(labels)
-        _algos.group_shift_indexer(indexer, labels, ngroups, periods)
+        libalgos.group_shift_indexer(indexer, labels, ngroups, periods)

         output = {}
         for name, obj in self._iterate_slices():
-            output[name] = algos.take_nd(obj.values, indexer)
+            output[name] = algorithms.take_nd(obj.values, indexer)

         return self._wrap_transformed_output(output)

@@ -1814,13 +1814,13 @@ def _get_cython_function(self, kind, how, values, is_numeric):
        def get_func(fname):
            # see if there is a fused-type version of function
            # only valid for numeric
-            f = getattr(_algos, fname, None)
+            f = getattr(libalgos, fname, None)
            if f is not None and is_numeric:
                return f

            # otherwise find dtype-specific version, falling back to object
            for dt in [dtype_str, 'object']:
-                f = getattr(_algos, "%s_%s" % (fname, dtype_str), None)
+                f = getattr(libalgos, "%s_%s" % (fname, dtype_str), None)
                if f is not None:
                    return f

@@ -1900,7 +1900,7 @@ def _cython_operation(self, kind, values, how, axis):
        elif is_integer_dtype(values):
            # we use iNaT for the missing value on ints
            # so pre-convert to guard this condition
-            if (values == tslib.iNaT).any():
+            if (values == iNaT).any():
                values = _ensure_float64(values)
            else:
                values = values.astype('int64', copy=False)
@@ -1942,7 +1942,7 @@ def _cython_operation(self, kind, values, how, axis):
                result, values, labels, func, is_numeric, is_datetimelike)

        if is_integer_dtype(result):
-            mask = result == tslib.iNaT
+            mask = result == iNaT
            if mask.any():
                result = result.astype('float64')
                result[mask] = np.nan
@@ -2033,7 +2033,7 @@ def _aggregate_series_fast(self, obj, func):
        dummy = obj._get_values(slice(None, 0)).to_dense()
        indexer = get_group_index_sorter(group_index, ngroups)
        obj = obj.take(indexer, convert=False)
-        group_index = algos.take_nd(group_index, indexer, allow_fill=False)
+        group_index = algorithms.take_nd(group_index, indexer, allow_fill=False)
        grouper = lib.SeriesGrouper(obj, func, group_index, ngroups,
                                    dummy)
        result, counts = grouper.get_result()
@@ -2131,7 +2131,7 @@ def groups(self):
        # GH 3881
        result = {}
        for key, value in zip(self.binlabels, self.bins):
-            if key is not tslib.NaT:
+            if key is not NaT:
                result[key] = value
        return result

@@ -2158,7 +2158,7 @@ def get_iterator(self, data, axis=0):

        start = 0
        for edge, label in zip(self.bins, self.binlabels):
-            if label is not tslib.NaT:
+            if label is not NaT:
                yield label, slicer(start, edge)
            start = edge

@@ -2172,7 +2172,7 @@ def indices(self):
        i = 0
        for label, bin in zip(self.binlabels, self.bins):
            if i < bin:
-                if label is not tslib.NaT:
+                if label is not NaT:
                    indices[label] = list(range(i, bin))
            i = bin
        return indices
@@ -2382,7 +2382,7 @@ def group_index(self):

    def _make_labels(self):
        if self._labels is None or self._group_index is None:
-            labels, uniques = algos.factorize(self.grouper, sort=self.sort)
+            labels, uniques = algorithms.factorize(self.grouper, sort=self.sort)
            uniques = Index(uniques, name=self.name)
            self._labels = labels
            self._group_index = uniques
@@ -2927,7 +2927,7 @@ def _transform_fast(self, func):

        ids, _, ngroup = self.grouper.group_info
        cast = (self.size().fillna(0) > 0).any()
-        out = algos.take_1d(func().values, ids)
+        out = algorithms.take_1d(func().values, ids)
        if cast:
            out = self._try_cast(out, self.obj)
        return Series(out, index=self.obj.index, name=self.obj.name)
@@ -2984,7 +2984,7 @@ def nunique(self, dropna=True):
        except TypeError:  # catches object dtypes
            assert val.dtype == object, \
                'val.dtype must be object, got %s' % val.dtype
-            val, _ = algos.factorize(val, sort=False)
+            val, _ = algorithms.factorize(val, sort=False)
            sorter = np.lexsort((val, ids))
            _isnull = lambda a: a == -1
        else:
@@ -3068,7 +3068,7 @@ def value_counts(self, normalize=False, sort=True, ascending=False,
            ids, val = ids[mask], val[mask]

        if bins is None:
-            lab, lev = algos.factorize(val, sort=True)
+            lab, lev = algorithms.factorize(val, sort=True)
        else:
            cat, bins = cut(val, bins, retbins=True)
            # bins[:-1] for backward compat;
@@ -3107,7 +3107,7 @@ def value_counts(self, normalize=False, sort=True, ascending=False,
        if dropna:
            m = ids[lab == -1]
            if _np_version_under1p8:
-                mi, ml = algos.factorize(m)
+                mi, ml = algorithms.factorize(m)
                d[ml] = d[ml] - np.bincount(mi)
            else:
                np.add.at(d, m, -1)
@@ -3129,7 +3129,7 @@ def value_counts(self, normalize=False, sort=True, ascending=False,
            out = _ensure_int64(out)
            return Series(out, index=mi, name=self.name)

-        # for compat. with algos.value_counts need to ensure every
+        # for compat. with libalgos.value_counts need to ensure every
        # bin is present at every index level, null filled with zeros
        diff = np.zeros(len(out), dtype='bool')
        for lab in labels[:-1]:
@@ -3700,7 +3700,7 @@ def _transform_fast(self, result, obj):
        ids, _, ngroup = self.grouper.group_info
        output = []
        for i, _ in enumerate(result.columns):
-            res = algos.take_1d(result.iloc[:, i].values, ids)
+            res = algorithms.take_1d(result.iloc[:, i].values, ids)
            if cast:
                res = self._try_cast(res, obj.iloc[:, i])
            output.append(res)
@@ -4188,7 +4188,7 @@ def __init__(self, data, labels, ngroups, axis=0):
    @cache_readonly
    def slabels(self):
        # Sorted labels
-        return algos.take_nd(self.labels, self.sort_idx, allow_fill=False)
+        return algorithms.take_nd(self.labels, self.sort_idx, allow_fill=False)

    @cache_readonly
    def sort_idx(self):
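At the public API level, the code path touched in the shift() hunk above backs GroupBy.shift: the (renamed) libalgos.group_shift_indexer builds a per-group shifted indexer and take_nd then gathers the values. A small usage sketch against the public API (example data made up here):

import pandas as pd

df = pd.DataFrame({'g': ['a', 'a', 'b', 'b', 'b'], 'v': [1, 2, 3, 4, 5]})

# Shift values by one position within each group; the first row of each
# group has nothing to pull from and becomes NaN.
print(df.groupby('g')['v'].shift(1))
# 0    NaN
# 1    1.0
# 2    NaN
# 3    3.0
# 4    4.0
# Name: v, dtype: float64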

pandas/core/ops.py
+7 -8

@@ -11,8 +11,7 @@
 import pandas as pd
 import datetime

-from pandas.libs import lib, index as _index, tslib, algos as _algos
-from pandas.libs.tslib import iNaT
+from pandas.libs import lib, libindex, libts, libalgos, iNaT

 from pandas import compat
 from pandas.util.decorators import Appender
@@ -479,7 +478,7 @@ def _convert_to_array(self, values, name=None, other=None):
            values = values._values
        elif not (isinstance(values, (np.ndarray, ABCSeries)) and
                  is_datetime64_dtype(values)):
-            values = tslib.array_to_datetime(values)
+            values = libts.array_to_datetime(values)
        elif inferred_type in ('timedelta', 'timedelta64'):
            # have a timedelta, convert to to ns here
            values = to_timedelta(values, errors='coerce', box=False)
@@ -683,12 +682,12 @@ def safe_na_op(lvalues, rvalues):
            if isinstance(rvalues, ABCSeries):
                if is_object_dtype(rvalues):
                    # if dtype is object, try elementwise op
-                    return _algos.arrmap_object(rvalues,
-                                                lambda x: op(lvalues, x))
+                    return libalgos.arrmap_object(rvalues,
+                                                  lambda x: op(lvalues, x))
            else:
                if is_object_dtype(lvalues):
-                    return _algos.arrmap_object(lvalues,
-                                                lambda x: op(x, rvalues))
+                    return libalgos.arrmap_object(lvalues,
+                                                  lambda x: op(x, rvalues))
            raise

    def wrapper(left, right, name=name, na_op=na_op):
@@ -786,7 +785,7 @@ def na_op(x, y):

        if is_scalar(y):
            mask = isnull(x)
-            y = _index.convert_scalar(x, _values_from_object(y))
+            y = libindex.convert_scalar(x, _values_from_object(y))
        else:
            mask = isnull(x) | isnull(y)
            y = y.view('i8')
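For context on the safe_na_op fallback above: arrmap_object (now imported as libalgos.arrmap_object) applies a Python-level function elementwise over an object-dtype array when the vectorised op fails. A rough pure-Python stand-in, written only to illustrate that behaviour (arrmap_object_sketch is a made-up name; the real routine is a Cython loop):

import operator
import numpy as np

def arrmap_object_sketch(arr, func):
    # Apply func to each element of an object-dtype array and return a new
    # object-dtype array; this mirrors the elementwise fallback taken when
    # the vectorised operation raises.
    out = np.empty(len(arr), dtype=object)
    for i, item in enumerate(arr):
        out[i] = func(item)
    return out

rvalues = np.array([1, 2.5, 3], dtype=object)
print(arrmap_object_sketch(rvalues, lambda x: operator.add(10, x)))
# [11 12.5 13]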
