Description
The current implementation requires you to pass the total length of the iterable, and this usually means making an extra query to find out the count. There's no need for this, since we can actually accomplish the same results by trying to fetch n+1 results, which will mean that there are more to be consumed, without the need for the real total count.
This is the custom implementation we have applied to spare us the count query, which can be costly and adds unnecessary overhead to every query. We only need the real total count when that value is explicitly requested (using a custom `CountableConnection`).
class CountableConnection(graphene.relay.Connection):
    """Relay connection that exposes a lazily-resolved ``totalCount`` field.

    The count is computed only when the client actually asks for it, so the
    up-front COUNT query is avoided for requests that do not need it.
    """

    class Meta:
        abstract = True

    # Optional field; resolving it is the only point where a count happens.
    total_count = graphene.Int()

    @staticmethod
    def resolve_total_count(root, info, *args, **kwargs):
        """Return the total number of items in ``root.iterable``.

        Prefers ``.count()`` (QuerySet-like iterables issue an efficient
        COUNT query without materialising rows) and falls back to ``len()``
        for plain sequences.
        """
        try:
            return root.iterable.count()
        except (AttributeError, TypeError):
            # Not a QuerySet-like object (no usable .count()); a plain
            # sequence still supports len(). Narrowed from a bare except,
            # which also swallowed KeyboardInterrupt/SystemExit and real
            # database errors raised by .count() itself.
            return len(root.iterable)
def connection_from_list_slice(list_slice, args=None, connection_type=None,
                               edge_type=None, pageinfo_type=None):
    """Build a relay Connection from a lazily-sliceable iterable.

    Given an iterable that supports slicing, consume only the amount needed
    for the requested page and decide whether more results exist by fetching
    one element more than requested (n+1): if n+1 items come back, there is a
    next/previous page. This spares the caller from supplying the total
    count, which usually costs an extra query.

    :param list_slice: sliceable iterable (list or QuerySet-like object).
    :param args: pagination arguments dict (``first``/``last``/``after``/``before``).
    :param connection_type: Connection class to instantiate (defaults to
        graphql_relay's ``Connection``).
    :param edge_type: Edge class for each item (defaults to ``Edge``).
    :param pageinfo_type: PageInfo class (defaults to ``PageInfo``).
    :returns: a ``connection_type`` instance with edges and page_info.
    """
    # Function-scope imports keep this drop-in replaceable; the helpers for
    # cursor <-> offset conversion were previously used without being
    # imported anywhere in this snippet (NameError at runtime), and the
    # old `base64, unbase64, is_str` import was unused.
    from graphql_relay.connection.arrayconnection import (
        get_offset_with_default, offset_to_cursor)
    from graphql_relay.connection.connectiontypes import (
        Connection, PageInfo, Edge)

    connection_type = connection_type or Connection
    edge_type = edge_type or Edge
    pageinfo_type = pageinfo_type or PageInfo

    args = args or {}
    before = args.get('before')
    after = args.get('after')
    first = args.get('first')
    last = args.get('last')

    if first:
        # Default of -1 makes a missing/invalid cursor start at offset 0.
        after = get_offset_with_default(after, -1) + 1
        _slice = list_slice[after:max(after, 0) + first + 1]  # fetch n+1
        items = _slice[:-1]
        if len(items) < first:
            items = _slice[:]  # not enough results: keep everything fetched
        edges = [
            edge_type(node=node, cursor=offset_to_cursor(after + i))
            for i, node in enumerate(items)
        ]
    elif last:
        if before:
            before = get_offset_with_default(before)
            _slice = list_slice[max(before - last - 1, 0):before]  # fetch n+1
        else:
            # Beware: without `before` the cursors must be anchored at the
            # end, which requires knowing the total amount.
            _slice = list_slice[-(last + 1):]  # same as [(last*-1)-1:]
            try:
                before = list_slice.count()
            except (AttributeError, TypeError):
                # Plain sequences have no .count() taking zero args; use len().
                before = len(list_slice)
        items = _slice[1:]
        if len(items) < last:
            items = _slice[:]  # not enough results: keep everything fetched
        edges = [
            edge_type(node=node, cursor=offset_to_cursor(before - last - 1 + i))
            for i, node in enumerate(items)
        ]
    else:
        # We do not allow after/before without first/last: return everything.
        # `_slice` must be assigned here too — previously it was not, so
        # e.g. first == 0 (falsy, but an int) hit a NameError below.
        _slice = list_slice[:]
        items = _slice
        edges = [
            edge_type(node=node, cursor=offset_to_cursor(i))
            for i, node in enumerate(items)
        ]

    first_edge_cursor = edges[0].cursor if edges else None
    last_edge_cursor = edges[-1].cursor if edges else None

    # `after` is only an int offset when the `first` branch ran; elsewhere it
    # is still the raw cursor (str or None), and comparing that with 0 raised
    # a TypeError on Python 3 — hence the isinstance guard.
    has_previous_page = bool(
        (isinstance(last, int) and len(_slice) > last)
        or (isinstance(after, int) and after > 0)
    )

    return connection_type(
        edges=edges,
        page_info=pageinfo_type(
            start_cursor=first_edge_cursor,
            end_cursor=last_edge_cursor,
            has_previous_page=has_previous_page,
            # n+1 items fetched means there is a next page.
            has_next_page=len(_slice) > first if isinstance(first, int) else False,
        )
    )
@syrusakbary Not sure if we want this as the default, or at least offer it as an optional strategy.
If this sounds reasonable I can make a PR for this.