/*
* SARG Squid Analysis Report Generator http://sarg.sourceforge.net
- * 1998, 2012
+ * 1998, 2015
*
* SARG donations:
* please look at http://sarg.sourceforge.net/donations.php
#include "include/conf.h"
#include "include/defs.h"
+//! The initial size, in bytes, allocated for the line buffer.
#define INITIAL_LINE_BUFFER_SIZE 32768
+/*!
+The amount by which the line buffer size is increased when it turns out to be too small to accommodate
+the line to read.
+*/
+#define LINE_BUFFER_SIZE_INCREMENT 8192
+/*!
+Maximum size of the line buffer.
+
+A text line read from the file must be smaller than this value or the function fails
+and aborts the program.
+
+10MB should not be a problem as most of the line is filled with the URL and squid 3
+limits the URL to 4096 bytes (see MAX_URL). Squid has reportedly been tested with
+MAX_URL set up to 32KB so I'm not expecting URLs much longer than that.
+
+Other proxies might handle longer URLs but certainly not longer than 10MB.
+
+Now, why put a limit? Sarg version 2.3 originally had no limit until sarg 2.3.3. At
+that point a user with a defective network mount point reported that sarg was eating
+up 8GB of memory available on the server triggering the OOM killer. So the limit is
+here to prevent sarg from choking on an invalid file.
+*/
+#define MAX_LINE_BUFFER_SIZE (10*1024*1024)
struct longlinestruct
{
line->start=0;
}
if (line->length>=line->size) {
- line->size+=8192;
+ line->size+=LINE_BUFFER_SIZE_INCREMENT;
+ if (line->size>=MAX_LINE_BUFFER_SIZE) {
+ debuga(_("A text line is more than %d bytes long denoting a corrupted file\n"),MAX_LINE_BUFFER_SIZE);
+ exit(EXIT_FAILURE);
+ }
newbuf=realloc(line->buffer,line->size);
if (!newbuf) {
debuga(_("Not enough memory to read one more line from the file\n"));