@@ -666,12 +666,10 @@ def open(self, mode: str = "a", **kwargs):
         tables = _tables()
 
         if self._mode != mode:
-
             # if we are changing a write mode to read, ok
             if self._mode in ["a", "w"] and mode in ["r", "r+"]:
                 pass
             elif mode in ["w"]:
-
                 # this would truncate, raise here
                 if self.is_open:
                     raise PossibleDataLossError(
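A minimal sketch of the mode handling this hunk touches, not part of the diff: the file name and DataFrame are made up, and PyTables must be installed. Switching an already-open store from a write mode to read mode falls into the `pass` branch; asking for "w" while the file is still open hits the truncation guard and raises PossibleDataLossError.

import pandas as pd

store = pd.HDFStore("demo.h5", mode="a")             # open for append
store.put("df", pd.DataFrame({"x": [1, 2, 3]}))

store.open(mode="r")      # write -> read: allowed, falls into the `pass` branch
try:
    store.open(mode="w")  # reopening in "w" while open would truncate the file
except Exception as err:  # pandas raises PossibleDataLossError here
    print(err)
store.close()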
@@ -700,7 +698,6 @@ def open(self, mode: str = "a", **kwargs):
                 raise
 
         except ValueError as err:
-
             # trap PyTables >= 3.1 FILE_OPEN_POLICY exception
             # to provide an updated message
             if "FILE_OPEN_POLICY" in str(err):
@@ -715,11 +712,9 @@ def open(self, mode: str = "a", **kwargs):
715
712
"which allows\n "
716
713
"files to be opened multiple times at once\n "
717
714
)
718
-
719
715
raise err
720
716
721
717
except Exception as err :
722
-
723
718
# trying to read from a non-existent file causes an error which
724
719
# is not part of IOError, make it one
725
720
if self ._mode == "r" and "Unable to open/create file" in str (err ):
@@ -1646,7 +1641,6 @@ def error(t):
         # infer the pt from the passed value
         if pt is None:
             if value is None:
-
                 _tables()
                 assert _table_mod is not None  # for mypy
                 if getattr(group, "table", None) or isinstance(
@@ -1678,10 +1672,8 @@ def error(t):
 
         # existing node (and must be a table)
         if tt is None:
-
             # if we are a writer, determine the tt
             if value is not None:
-
                 if pt == "series_table":
                     index = getattr(value, "index", None)
                     if index is not None:
@@ -1886,11 +1878,9 @@ def __init__(
         self.auto_close = auto_close
 
     def __iter__(self):
-
         # iterate
         current = self.start
         while current < self.stop:
-
             stop = min(current + self.chunksize, self.stop)
             value = self.func(None, None, self.coordinates[current:stop])
             current = stop
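For context, a sketch of how this chunked __iter__ is driven from the public API; the file name, key, and chunk size are illustrative, not taken from the diff.

import pandas as pd

df = pd.DataFrame({"a": range(100_000)})
with pd.HDFStore("chunks.h5", mode="w") as store:
    store.append("df", df, format="table")          # table format supports chunked reads
    for chunk in store.select("df", chunksize=25_000):
        # each chunk is built from self.coordinates[current:stop] as shown above
        print(len(chunk))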
@@ -1906,7 +1896,6 @@ def close(self):
             self.store.close()
 
     def get_result(self, coordinates: bool = False):
-
         # return the actual iterator
         if self.chunksize is not None:
             if not isinstance(self.s, Table):
@@ -2105,7 +2094,6 @@ def maybe_set_size(self, min_itemsize=None):
             with an integer size
         """
         if _ensure_decoded(self.kind) == "string":
-
             if isinstance(min_itemsize, dict):
                 min_itemsize = min_itemsize.get(self.name)
 
@@ -2163,7 +2151,6 @@ def update_info(self, info):
 
             existing_value = idx.get(key)
             if key in idx and value is not None and existing_value != value:
-
                 # frequency/name just warn
                 if key in ["freq", "index_name"]:
                     ws = attribute_conflict_doc % (key, existing_value, value)
@@ -2356,10 +2343,8 @@ def _get_atom(cls, values: ArrayLike) -> "Col":
             atom = cls.get_atom_timedelta64(shape)
         elif is_complex_dtype(dtype):
             atom = _tables().ComplexCol(itemsize=itemsize, shape=shape[0])
-
         elif is_string_dtype(dtype):
             atom = cls.get_atom_string(shape, itemsize)
-
         else:
             atom = cls.get_atom_data(shape, kind=dtype.name)
 
@@ -2465,7 +2450,6 @@ def convert(self, values: np.ndarray, nan_rep, encoding: str, errors: str):
 
         # reverse converts
         if dtype == "datetime64":
-
             # recreate with tz if indicated
             converted = _set_tz(converted, tz, coerce=True)
 
@@ -2482,7 +2466,6 @@ def convert(self, values: np.ndarray, nan_rep, encoding: str, errors: str):
             )
 
         elif meta == "category":
-
             # we have a categorical
             categories = metadata
             codes = converted.ravel()
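A sketch of the categorical round-trip that this branch reconstructs from stored codes plus categories metadata; the path and data are illustrative.

import pandas as pd

df = pd.DataFrame({"c": pd.Categorical(["a", "b", "a"])})
df.to_hdf("cat.h5", key="df", mode="w", format="table")   # categories stored as metadata
back = pd.read_hdf("cat.h5", "df")
print(back["c"].dtype)   # category: codes and categories reassembled on read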
@@ -2837,7 +2820,6 @@ def read_array(
             ret = node[start:stop]
 
         if dtype == "datetime64":
-
             # reconstruct a timezone if indicated
             tz = getattr(attrs, "tz", None)
             ret = _set_tz(ret, tz, coerce=True)
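A sketch of the timezone reconstruction above: the tz is kept as a node attribute and re-applied on read via _set_tz. Path and data are illustrative.

import pandas as pd

s = pd.Series(
    [1.0, 2.0],
    index=pd.date_range("2020-01-01", periods=2, tz="US/Eastern"),
)
s.to_hdf("tz.h5", key="s", mode="w")    # fixed format; tz kept in attrs
back = pd.read_hdf("tz.h5", "s")
print(back.index.tz)                    # US/Eastern, restored on read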
@@ -3041,7 +3023,6 @@ def write_array(self, key: str, value: ArrayLike, items: Optional[Index] = None)
             self.write_array_empty(key, value)
 
         elif value.dtype.type == np.object_:
-
             # infer the type, warn if we have a non-string type here (for
             # performance)
             inferred_type = lib.infer_dtype(value, skipna=False)
@@ -3725,7 +3706,6 @@ def validate_data_columns(self, data_columns, min_itemsize, non_index_axes):
 
         # if min_itemsize is a dict, add the keys (exclude 'values')
         if isinstance(min_itemsize, dict):
-
             existing_data_columns = set(data_columns)
             data_columns = list(data_columns)  # ensure we do not modify
             data_columns.extend(
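A sketch of the min_itemsize handling above, with per-column sizes passed as a dict when appending in table format; the column name and size are illustrative. Per the hunk, dict keys (other than 'values') are also added to data_columns.

import pandas as pd

df = pd.DataFrame({"name": ["alice", "bob"], "x": [1, 2]})
with pd.HDFStore("strings.h5", mode="w") as store:
    # reserve 50 characters for "name" so longer strings can be appended later
    store.append("df", df, format="table", min_itemsize={"name": 50})
    store.append("df", pd.DataFrame({"name": ["a much longer name"], "x": [3]}))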
@@ -4161,7 +4141,6 @@ def read_column(
         # find the axes
         for a in self.axes:
             if column == a.name:
-
                 if not a.is_data_indexable:
                     raise ValueError(
                         f"column [{column}] can not be extracted individually; "
@@ -4287,9 +4266,7 @@ def write_data(self, chunksize: Optional[int], dropna: bool = False):
         # if dropna==True, then drop ALL nan rows
         masks = []
         if dropna:
-
             for a in self.values_axes:
-
                 # figure the mask: only do if we can successfully process this
                 # column, otherwise ignore the mask
                 mask = isna(a.data).all(axis=0)
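A sketch of the dropna path above: rows that are NaN in every column can be dropped at write time. The data and path are illustrative.

import numpy as np
import pandas as pd

df = pd.DataFrame({"a": [1.0, np.nan], "b": [2.0, np.nan]})
with pd.HDFStore("dropna.h5", mode="w") as store:
    store.append("df", df, format="table", dropna=True)   # the all-NaN row is masked out
    print(len(store.select("df")))                        # 1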
@@ -4868,7 +4845,6 @@ def _unconvert_index(
 def _maybe_convert_for_string_atom(
     name: str, block, existing_col, min_itemsize, nan_rep, encoding, errors
 ):
-
     if not block.is_object:
         return block.values
 
@@ -4901,7 +4877,6 @@ def _maybe_convert_for_string_atom(
         # we cannot serialize this data, so report an exception on a column
         # by column basis
         for i in range(len(block.shape[0])):
-
             col = block.iget(i)
             inferred_type = lib.infer_dtype(col, skipna=False)
             if inferred_type != "string":