]> git.ipfire.org Git - thirdparty/sarg.git/blob - download.c
Report any error while reading the day summary file
[thirdparty/sarg.git] / download.c
1 /*
2 * SARG Squid Analysis Report Generator http://sarg.sourceforge.net
3 * 1998, 2011
4 *
5 * SARG donations:
6 * please look at http://sarg.sourceforge.net/donations.php
7 * Support:
8 * http://sourceforge.net/projects/sarg/forums/forum/363374
9 * ---------------------------------------------------------------------
10 *
11 * This program is free software; you can redistribute it and/or modify
12 * it under the terms of the GNU General Public License as published by
13 * the Free Software Foundation; either version 2 of the License, or
14 * (at your option) any later version.
15 *
16 * This program is distributed in the hope that it will be useful,
17 * but WITHOUT ANY WARRANTY; without even the implied warranty of
18 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 * GNU General Public License for more details.
20 *
21 * You should have received a copy of the GNU General Public License
22 * along with this program; if not, write to the Free Software
23 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111, USA.
24 *
25 */
26
27 #include "include/conf.h"
28 #include "include/defs.h"
29
30 /*@null@*/static char *DownloadSuffix=NULL;
31 /*@null@*/static char **DownloadSuffixIndex=NULL;
32 static int NDownloadSuffix=0;
33
/*
 * Build the "Downloaded files" HTML report.
 *
 * Input : "%s/download.log" in the temporary directory, one record per
 *         line with tab separated fields: date, time, user ID, IP
 *         address and URL (see the getword calls below).
 * Output: "%s/download.html" in the report output directory.
 *
 * The input file is deleted once the report has been written.  All
 * fatal conditions (unreadable files, broken records, unknown user
 * IDs, out of memory) terminate the program with EXIT_FAILURE.
 */
void download_report(void)
{
	FILE *fp_in = NULL, *fp_ou = NULL;

	char *buf;
	char *url;
	char report_in[MAXLEN];   /* path of the temporary download.log */
	char report[MAXLEN];      /* path of the generated download.html */
	char ip[MAXLEN];
	char oip[MAXLEN];         /* IP address seen on the previous record */
	char user[MAXLEN];
	char ouser[MAXLEN];       /* user ID seen on the previous record */
	char ouser2[MAXLEN];      /* user label tracked for DownloadReportLimit */
	char data[15];
	char hora[15];
	int z=0;                  /* stays 0 until the first record is processed */
	int count=0;              /* records already printed for the current user */
	int i;
	int day,month,year;
	bool new_user;
	struct getwordstruct gwarea;
	longline line;
	struct userinfostruct *uinfo;
	struct tm t;

	ouser[0]='\0';
	ouser2[0]='\0';

	snprintf(report_in,sizeof(report_in),"%s/download.log",tmp);
	if(access(report_in, R_OK) != 0) {
		/* no download.log means nothing was downloaded: silently skip */
		if (debugz) debugaz(_("Downloaded files report not generated as it is empty\n"));
		return;
	}

	snprintf(report,sizeof(report),"%s/download.html",outdirname);

	if((fp_in=MY_FOPEN(report_in,"r"))==NULL) {
		debuga(_("(download) Cannot open log file %s\n"),report_in);
		exit(EXIT_FAILURE);
	}

	if((fp_ou=MY_FOPEN(report,"w"))==NULL) {
		debuga(_("(download) Cannot open log file %s\n"),report);
		exit(EXIT_FAILURE);
	}

	/* page header plus the four column table heading */
	write_html_header(fp_ou,(IndexTree == INDEX_TREE_DATE) ? 3 : 1,_("Downloads"),HTML_JS_NONE);
	fputs("<tr><td class=\"header_c\">",fp_ou);
	fprintf(fp_ou,_("Period: %s"),period.html);
	fputs("</td></tr>\n",fp_ou);
	fprintf(fp_ou,"<tr><th class=\"header_c\">%s</th></tr>\n",_("Downloads"));
	close_html_header(fp_ou);

	fputs("<div class=\"report\"><table cellpadding=\"0\" cellspacing=\"2\">\n",fp_ou);
	fprintf(fp_ou,"<tr><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th></tr>\n",_("USERID"),_("IP/NAME"),_("DATE/TIME"),_("ACCESSED SITE"));

	if ((line=longline_create())==NULL) {
		debuga(_("Not enough memory to read the downloaded files\n"));
		exit(EXIT_FAILURE);
	}

	while((buf=longline_read(fp_in,line))!=NULL) {
		/* split the record: date, time, user and IP are copied out,
		 * the URL is only pointed to inside the line buffer */
		getword_start(&gwarea,buf);
		if (getword(data,sizeof(data),&gwarea,'\t')<0 || getword(hora,sizeof(hora),&gwarea,'\t')<0 ||
		    getword(user,sizeof(user),&gwarea,'\t')<0 || getword(ip,sizeof(ip),&gwarea,'\t')<0) {
			debuga(_("There is a broken record or garbage in file %s\n"),report_in);
			exit(EXIT_FAILURE);
		}
		if (getword_ptr(buf,&url,&gwarea,'\t')<0) {
			debuga(_("There is a broken url in file %s\n"),report_in);
			exit(EXIT_FAILURE);
		}
		/* reformat the day/month/year date according to the locale;
		 * records with an unparsable date are skipped */
		if (sscanf(data,"%d/%d/%d",&day,&month,&year)!=3) continue;
		computedate(year,month,day,&t);
		strftime(data,sizeof(data),"%x",&t);

		uinfo=userinfo_find_from_id(user);
		if (!uinfo) {
			debuga(_("Unknown user ID %s in file %s\n"),user,report_in);
			exit(EXIT_FAILURE);
		}
		/* the user/IP cells are only filled when either value changes */
		new_user=false;
		if(!z) {
			strcpy(ouser,user);
			strcpy(oip,ip);
			z++;
			new_user=true;
		} else {
			if(strcmp(ouser,user) != 0) {
				strcpy(ouser,user);
				new_user=true;
			}
			if(strcmp(oip,ip) != 0) {
				strcpy(oip,ip);
				new_user=true;
			}
		}

		/* optionally cap the number of lines reported per user */
		if(DownloadReportLimit) {
			if(strcmp(ouser2,uinfo->label) == 0) {
				count++;
			} else {
				count=1;
				strcpy(ouser2,uinfo->label);
			}
			if(count >= DownloadReportLimit)
				continue;
		}

		/* strip trailing control characters (e.g. CR/LF) from the URL */
		for (i=strlen(url)-1 ; i>=0 && (unsigned char)url[i]<' ' ; i--) url[i]=0;

		fputs("<tr>",fp_ou);
		if (new_user) {
			if (uinfo->topuser)
				fprintf(fp_ou,"<td class=\"data\"><a href=\"%s/%s.html\">%s</a></td><td class=\"data\">%s</td>",uinfo->filename,uinfo->filename,uinfo->label,ip);
			else
				fprintf(fp_ou,"<td class=\"data\">%s</td><td class=\"data\">%s</td>",uinfo->label,ip);
		} else
			fputs("<td class=\"data\"></td><td class=\"data\"></td>",fp_ou);
		fprintf(fp_ou,"<td class=\"data\">%s-%s</td><td class=\"data2\">",data,hora);
		/* optional "block this site" icon; skipped when the URL starts
		 * with ALIAS_PREFIX (presumably an alias entry — verify) */
		if(BlockIt[0]!='\0' && url[0]!=ALIAS_PREFIX) {
			fprintf(fp_ou,"<a href=\"%s%s?url=\"",wwwDocumentRoot,BlockIt);
			output_html_url(fp_ou,url);
			fprintf(fp_ou,"\"><img src=\"%s/sarg-squidguard-block.png\"></a>&nbsp;",ImageFile);
		}
		output_html_link(fp_ou,"",url,100);//scheme is kept from the log file
		fputs("</td></tr>\n",fp_ou);
	}
	fclose(fp_in);
	longline_destroy(&line);

	fputs("</table></div>\n",fp_ou);
	if (write_html_trailer(fp_ou)<0)
		debuga(_("Write error in file %s\n"),report);
	if (fclose(fp_ou)==EOF)
		debuga(_("Failed to close file %s - %s\n"),report,strerror(errno));

	/* the temporary log is consumed by this report */
	if (unlink(report_in)) {
		debuga(_("Cannot delete %s - %s\n"),report_in,strerror(errno));
		exit(EXIT_FAILURE);
	}

	return;
}
178
179 void free_download(void)
180 {
181 if (DownloadSuffix) {
182 free(DownloadSuffix);
183 DownloadSuffix=NULL;
184 }
185 if (DownloadSuffixIndex) {
186 free(DownloadSuffixIndex);
187 DownloadSuffixIndex=NULL;
188 }
189 NDownloadSuffix=0;
190 }
191
192 void set_download_suffix(const char *list)
193 {
194 char *str;
195 int i, j, k;
196 int cmp;
197
198 free_download();
199
200 DownloadSuffix=strdup(list);
201 if (!DownloadSuffix) {
202 debuga(_("Download suffix list too long\n"));
203 exit(EXIT_FAILURE);
204 }
205 j = 1;
206 for (i=0 ; list[i] ; i++)
207 if (list[i] == ',') j++;
208 DownloadSuffixIndex=malloc(j*sizeof(char *));
209 if (!DownloadSuffixIndex) {
210 debuga(_("Too many download suffixes\n"));
211 exit(EXIT_FAILURE);
212 }
213
214 str = DownloadSuffix;
215 for (i=0 ; DownloadSuffix[i] ; i++) {
216 if (DownloadSuffix[i] == ',') {
217 DownloadSuffix[i] = '\0';
218 if (*str) {
219 cmp = -1;
220 for (j=0 ; j<NDownloadSuffix && (cmp=strcasecmp(str,DownloadSuffixIndex[j]))>0 ; j++);
221 if (cmp != 0) {
222 for (k=NDownloadSuffix ; k>j ; k--)
223 DownloadSuffixIndex[k]=DownloadSuffixIndex[k-1];
224 NDownloadSuffix++;
225 DownloadSuffixIndex[j]=str;
226 }
227 }
228 str=DownloadSuffix+i+1;
229 }
230 }
231
232 if (*str) {
233 cmp = -1;
234 for (j=0 ; j<NDownloadSuffix && (cmp=strcasecmp(str,DownloadSuffixIndex[j]))>0 ; j++);
235 if (cmp != 0) {
236 for (k=NDownloadSuffix ; k>j ; k--)
237 DownloadSuffixIndex[k]=DownloadSuffixIndex[k-1];
238 NDownloadSuffix++;
239 DownloadSuffixIndex[j]=str;
240 }
241 }
242 }
243
244 bool is_download_suffix(const char *url)
245 {
246 int urllen;
247 int i;
248 int down, up, center;
249 const char *suffix;
250 int cmp;
251 const int max_suffix=10;
252
253 if (DownloadSuffix == NULL || NDownloadSuffix == 0) return(false);
254
255 urllen=strlen(url)-1;
256 if (urllen<=0) return(false);
257 if (url[urllen] == '.') return(false); //reject a single trailing dot
258 for (i=0 ; i<urllen && (url[i]!='/' || url[i+1]=='/') && url[i]!='?' ; i++);
259 if (i>=urllen) return(false); // url is a hostname without any path or file to download
260
261 for (i=0 ; i<=max_suffix && i<urllen && url[urllen-i]!='.' ; i++)
262 if (url[urllen-i] == '/' || url[urllen-i] == '?') return(false);
263 if (i>max_suffix || i>=urllen) return(false);
264
265 suffix=url+urllen-i+1;
266 down=0;
267 up=NDownloadSuffix-1;
268 while (down<=up) {
269 center=(down+up)/2;
270 cmp=strcasecmp(suffix,DownloadSuffixIndex[center]);
271 if (cmp == 0) return(true);
272 if (cmp < 0)
273 up = center-1;
274 else
275 down = center+1;
276 }
277 return(false);
278 }
279