Commit 1a48e00

Change occurrences of % and format() to f-strings
1 parent 4132f36 commit 1a48e00

13 files changed: +83 -83 lines changed
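
For context, the pattern applied throughout this commit is the replacement of str.format() calls (and some printf-style % formatting) with f-strings, which interpolate the named expressions in place and produce identical text. A minimal sketch of that equivalence, not part of the diff, reusing the account_name variable from the first hunk below with a made-up value:

# Illustrative only: 'example' is a made-up account name.
account_name = 'example'
old = "https://{}.blob.core.windows.net/".format(account_name)
new = f"https://{account_name}.blob.core.windows.net/"
assert old == new  # both build the same URL string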

zarr/_storage/absstore.py

Lines changed: 1 addition & 1 deletion
@@ -77,7 +77,7 @@ def __init__(self, container=None, prefix='', account_name=None, account_key=Non
             from azure.storage.blob import ContainerClient
             blob_service_kwargs = blob_service_kwargs or {}
             client = ContainerClient(
-                "https://{}.blob.core.windows.net/".format(account_name), container,
+                f"https://{account_name}.blob.core.windows.net/", container,
                 credential=account_key, **blob_service_kwargs
             )

zarr/_storage/store.py

Lines changed: 1 addition & 1 deletion
@@ -227,7 +227,7 @@ def _validate_key(self, key: str):
             # TODO: Possibly allow key == ".zmetadata" too if we write a
             # consolidated metadata spec corresponding to this?
         ):
-            raise ValueError("keys starts with unexpected value: `{}`".format(key))
+            raise ValueError(f"keys starts with unexpected value: `{key}`")

         if key.endswith('/'):
             raise ValueError("keys may not end in /")

zarr/convenience.py

Lines changed: 10 additions & 10 deletions
@@ -248,7 +248,7 @@ def save_group(store: StoreLike, *args, zarr_version=None, path=None, **kwargs):
     try:
         grp = _create_group(_store, path=path, overwrite=True, zarr_version=zarr_version)
         for i, arr in enumerate(args):
-            k = 'arr_{}'.format(i)
+            k = f'arr_{i}'
             grp.create_dataset(k, data=arr, overwrite=True, zarr_version=zarr_version)
         for k, arr in kwargs.items():
             grp.create_dataset(k, data=arr, overwrite=True, zarr_version=zarr_version)
@@ -517,9 +517,9 @@ def _log_copy_summary(log, dry_run, n_copied, n_skipped, n_bytes_copied):
         message = 'dry run: '
     else:
         message = 'all done: '
-    message += '{:,} copied, {:,} skipped'.format(n_copied, n_skipped)
+    message += f'{n_copied:,} copied, {n_skipped:,} skipped'
     if not dry_run:
-        message += ', {:,} bytes copied'.format(n_bytes_copied)
+        message += f', {n_bytes_copied:,} bytes copied'
     log(message)


@@ -710,14 +710,14 @@ def copy_store(source, dest, source_path='', dest_path='', excludes=None,

         # take action
         if do_copy:
-            log('copy {}'.format(descr))
+            log(f'copy {descr}')
             if not dry_run:
                 data = source[source_key]
                 n_bytes_copied += buffer_size(data)
                 dest[dest_key] = data
             n_copied += 1
         else:
-            log('skip {}'.format(descr))
+            log(f'skip {descr}')
             n_skipped += 1

     # log a final message with a summary of what happened
@@ -728,7 +728,7 @@ def copy_store(source, dest, source_path='', dest_path='', excludes=None,

 def _check_dest_is_group(dest):
     if not hasattr(dest, 'create_dataset'):
-        raise ValueError('dest must be a group, got {!r}'.format(dest))
+        raise ValueError(f'dest must be a group, got {dest!r}')


 def copy(source, dest, name=None, shallow=False, without_attrs=False, log=None,
@@ -915,7 +915,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists,
         if do_copy:

             # log a message about what we're going to do
-            log('copy {} {} {}'.format(source.name, source.shape, source.dtype))
+            log(f'copy {source.name} {source.shape} {source.dtype}')

             if not dry_run:

@@ -986,7 +986,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists,
             n_copied += 1

         else:
-            log('skip {} {} {}'.format(source.name, source.shape, source.dtype))
+            log(f'skip {source.name} {source.shape} {source.dtype}')
             n_skipped += 1

     elif root or not shallow:
@@ -1008,7 +1008,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists,
         if do_copy:

             # log action
-            log('copy {}'.format(source.name))
+            log(f'copy {source.name}')

             if not dry_run:

@@ -1045,7 +1045,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists,
             n_copied += 1

         else:
-            log('skip {}'.format(source.name))
+            log(f'skip {source.name}')
             n_skipped += 1

     return n_copied, n_skipped, n_bytes_copied
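
The _log_copy_summary hunk above depends on format specifications such as the thousands separator carrying over unchanged: everything after the colon in a replacement field is interpreted the same way in str.format() and in an f-string. A small sketch of that equivalence, with a made-up count (not part of the commit):

n_copied = 1234567  # hypothetical value
assert '{:,} copied'.format(n_copied) == f'{n_copied:,} copied' == '1,234,567 copied'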

zarr/core.py

Lines changed: 4 additions & 4 deletions
@@ -2256,7 +2256,7 @@ def _encode_chunk(self, chunk):

     def __repr__(self):
         t = type(self)
-        r = '<{}.{}'.format(t.__module__, t.__name__)
+        r = f'<{t.__module__}.{t.__name__}'
         if self.name:
             r += ' %r' % self.name
         r += ' %s' % str(self.shape)
@@ -2297,11 +2297,11 @@ def info_items(self):
     def _info_items_nosync(self):

         def typestr(o):
-            return '{}.{}'.format(type(o).__module__, type(o).__name__)
+            return f'{type(o).__module__}.{type(o).__name__}'

         def bytestr(n):
             if n > 2**10:
-                return '{} ({})'.format(n, human_readable_size(n))
+                return f'{n} ({human_readable_size(n)})'
             else:
                 return str(n)

@@ -2342,7 +2342,7 @@ def bytestr(n):
             ('Storage ratio', '%.1f' % (self.nbytes / self.nbytes_stored)),
         ]
         items += [
-            ('Chunks initialized', '{}/{}'.format(self.nchunks_initialized, self.nchunks))
+            ('Chunks initialized', f'{self.nchunks_initialized}/{self.nchunks}')
         ]

         return items
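
The __repr__ and typestr hunks rely on f-string replacement fields accepting arbitrary expressions, so chained attribute lookups like t.__module__ move directly inside the braces with no positional arguments. A self-contained sketch (the Thing class is hypothetical, not from zarr):

class Thing:
    pass

t = Thing
assert '{}.{}'.format(t.__module__, t.__name__) == f'{t.__module__}.{t.__name__}'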

zarr/hierarchy.py

Lines changed: 2 additions & 2 deletions
@@ -316,7 +316,7 @@ def __len__(self):

     def __repr__(self):
         t = type(self)
-        r = '<{}.{}'.format(t.__module__, t.__name__)
+        r = f'<{t.__module__}.{t.__name__}'
         if self.name:
             r += ' %r' % self.name
         if self._read_only:
@@ -335,7 +335,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
     def info_items(self):

         def typestr(o):
-            return '{}.{}'.format(type(o).__module__, type(o).__name__)
+            return f'{type(o).__module__}.{type(o).__name__}'

         items = []

zarr/indexing.py

Lines changed: 1 addition & 1 deletion
@@ -880,7 +880,7 @@ def check_fields(fields, dtype):
                 # multiple field selection
                 out_dtype = np.dtype([(f, dtype[f]) for f in fields])
         except KeyError as e:
-            raise IndexError("invalid 'fields' argument, field not found: {!r}".format(e))
+            raise IndexError(f"invalid 'fields' argument, field not found: {e!r}")
         else:
             return out_dtype
     else:
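
The check_fields hunk keeps the !r conversion flag, which applies repr() to the value in both styles, so the exception still renders the same way inside the message. Illustrative sketch only (the KeyError here is constructed by hand):

e = KeyError('foo')
assert "field not found: {!r}".format(e) == f"field not found: {e!r}"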

zarr/storage.py

Lines changed: 1 addition & 1 deletion
@@ -2807,7 +2807,7 @@ def __init__(self, prefix='zarr', dimension_separator=None, **kwargs):
         self.client = redis.Redis(**kwargs)

     def _key(self, key):
-        return '{prefix}:{key}'.format(prefix=self._prefix, key=key)
+        return f'{self._prefix}:{key}'

     def __getitem__(self, key):
         return self.client[self._key(key)]
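
In the RedisStore._key hunk the keyword arguments to format() disappear entirely, because the f-string reads self._prefix and key straight from the enclosing scope. A standalone sketch with made-up values (plain variables standing in for the store attribute):

prefix, key = 'zarr', 'data/0.0'
assert '{prefix}:{key}'.format(prefix=prefix, key=key) == f'{prefix}:{key}' == 'zarr:data/0.0'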

zarr/tests/test_convenience.py

Lines changed: 1 addition & 1 deletion
@@ -81,7 +81,7 @@ def test_open_array(path_type, zarr_version):

     # path not found
     with pytest.raises(ValueError):
-        open('doesnotexist', mode='r')
+        open('doesnotexist')


 @pytest.mark.parametrize("zarr_version", _VERSIONS)

zarr/tests/test_core.py

Lines changed: 3 additions & 3 deletions
@@ -1155,7 +1155,7 @@ def test_dtypes(self):
         # datetime, timedelta
         for base_type in 'Mm':
             for resolution in 'D', 'us', 'ns':
-                dtype = '{}8[{}]'.format(base_type, resolution)
+                dtype = f'{base_type}8[{resolution}]'
                 z = self.create_array(shape=100, dtype=dtype, fill_value=0)
                 assert z.dtype == np.dtype(dtype)
                 a = np.random.randint(np.iinfo('i8').min, np.iinfo('i8').max,
@@ -1348,7 +1348,7 @@ def compare_arrays(expected, actual, item_dtype):

         # convenience API
         for item_type in 'int', '<u4':
-            z = self.create_array(shape=data.shape, dtype='array:{}'.format(item_type))
+            z = self.create_array(shape=data.shape, dtype=f'array:{item_type}')
             assert z.dtype == object
             assert isinstance(z.filters[0], VLenArray)
             assert z.filters[0].dtype == np.dtype(item_type)
@@ -1987,7 +1987,7 @@ def test_object_arrays_vlen_array(self):
         # convenience API
         for item_type in 'int', '<u4':
             with pytest.raises(ValueError):
-                self.create_array(shape=data.shape, dtype='array:{}'.format(item_type))
+                self.create_array(shape=data.shape, dtype=f'array:{item_type}')

     def test_object_arrays_danger(self):
         # Cannot hacking out object codec as N5 doesn't allow object codecs
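
The dtype strings assembled in test_dtypes are plain concatenations, so the rewrite is purely mechanical and the same text comes out either way. For example, using one (base_type, resolution) pair from the loop above:

base_type, resolution = 'M', 'D'
assert '{}8[{}]'.format(base_type, resolution) == f'{base_type}8[{resolution}]' == 'M8[D]'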
