    The pre-fetching behavior fetches only one row initially, and then
    grows its buffer size by a fixed amount with each successive need
-   for additional rows up to a size of 100.
+   for additional rows up to a size of 1000.
    """
    def _init_metadata(self):

    size_growth = {
        5: 10,
        10: 20,
        20: 50,
-       50: 100
+       50: 100,
+       100: 250,
+       250: 500,
+       500: 1000
    }
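For orientation, here is a minimal standalone sketch (not part of the patch) of how the growth chart above, the class-level size_growth mapping consulted in __buffer_rows() below, drives the size passed to successive fetchmany() calls, and how the new max_row_buffer option caps that growth. The helper name buffer_sizes() is hypothetical, and the initial 1 -> 5 step is assumed from the default buffer size of 1:

# Illustration only; simulates the growth chart and the new cap.
size_growth = {1: 5, 5: 10, 10: 20, 20: 50, 50: 100,
               100: 250, 250: 500, 500: 1000}

def buffer_sizes(fetches, max_row_buffer=None):
    """Yield the buffer size used by each successive fetchmany() call."""
    size = 1  # the first fetch requests a single row
    for _ in range(fetches):
        yield size
        size = size_growth.get(size, size)      # grow per the chart
        if max_row_buffer is not None:
            size = min(max_row_buffer, size)    # the new cap

print(list(buffer_sizes(10)))       # [1, 5, 10, 20, 50, 100, 250, 500, 1000, 1000]
print(list(buffer_sizes(10, 200)))  # [1, 5, 10, 20, 50, 100, 200, 200, 200, 200]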
    def __buffer_rows(self):
        size = getattr(self, '_bufsize', 1)
        self.__rowbuffer = collections.deque(self.cursor.fetchmany(size))
        self._bufsize = self.size_growth.get(size, size)
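+       # cap the computed buffer size at the 'max_row_buffer'
+       # execution option, when one has been provided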
+       max_row_buffer = self.context.execution_options.get('max_row_buffer')
+       if max_row_buffer is not None:
+           self._bufsize = min(max_row_buffer, self._bufsize)

    def _soft_close(self, **kw):
        self.__rowbuffer.clear()
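The new option is read from the result's execution options, so at the Core level it can be combined with stream_results on a connection. A hedged usage sketch follows; the engine URL, table name, and the cap of 500 are placeholders:

# Usage sketch; URL and table name are placeholders.
from sqlalchemy import create_engine, text

engine = create_engine("postgresql://scott:tiger@localhost/test")

with engine.connect() as conn:
    result = conn.execution_options(
        stream_results=True,   # use the buffered/streaming result behavior
        max_row_buffer=500,    # never prefetch more than 500 rows at a time
    ).execute(text("SELECT * FROM big_table"))
    for row in result:
        print(row)

The Query.yield_per() change below does the equivalent on the ORM side.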
"""
        self._yield_per = count
        self._execution_options = self._execution_options.union(
-           {"stream_results": True})
+           {"stream_results": True,
+            "max_row_buffer": count})
    def get(self, ident):
        """Return an instance based on the given primary key identifier,