]> git.ipfire.org Git - thirdparty/sarg.git/blob - download.c
Make the report titles consistent
[thirdparty/sarg.git] / download.c
1 /*
2 * SARG Squid Analysis Report Generator http://sarg.sourceforge.net
3 * 1998, 2010
4 *
5 * SARG donations:
6 * please look at http://sarg.sourceforge.net/donations.php
7 * Support:
8 * http://sourceforge.net/projects/sarg/forums/forum/363374
9 * ---------------------------------------------------------------------
10 *
11 * This program is free software; you can redistribute it and/or modify
12 * it under the terms of the GNU General Public License as published by
13 * the Free Software Foundation; either version 2 of the License, or
14 * (at your option) any later version.
15 *
16 * This program is distributed in the hope that it will be useful,
17 * but WITHOUT ANY WARRANTY; without even the implied warranty of
18 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 * GNU General Public License for more details.
20 *
21 * You should have received a copy of the GNU General Public License
22 * along with this program; if not, write to the Free Software
23 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111, USA.
24 *
25 */
26
27 #include "include/conf.h"
28 #include "include/defs.h"
29
30 /*@null@*/static char *DownloadSuffix=NULL;
31 /*@null@*/static char **DownloadSuffixIndex=NULL;
32 static int NDownloadSuffix=0;
33
/*
 * download_report - write the "Downloads" HTML report (download.html).
 *
 * Reads the intermediate log TempDir/sarg/download.log (tab separated
 * fields per line: date, time, user ID, IP, URL) and emits one HTML table
 * row per record into outdirname/download.html.  Returns silently when
 * the intermediate log does not exist; any other failure is fatal (exit).
 * The intermediate log is deleted once the report has been written.
 *
 * NOTE(review): the "print user/IP only on the first of its rows" and the
 * DownloadReportLimit truncation both assume the records arrive grouped
 * by user — confirm the producer of download.log sorts it that way.
 */
void download_report(void)
{

	FILE *fp_in = NULL, *fp_ou = NULL;

	char *buf;			/* current line from the intermediate log */
	char *url;			/* URL field; points into buf, not a copy */
	char report_in[MAXLEN];		/* path of the intermediate log */
	char report[MAXLEN];		/* path of the generated HTML report */
	char ip[MAXLEN];		/* IP field of the current record */
	char oip[MAXLEN];		/* IP of the previous record */
	char user[MAXLEN];		/* user ID field of the current record */
	char ouser[MAXLEN];		/* user ID of the previous record */
	char ouser2[MAXLEN];		/* label of the user currently being counted */
	char data[15];			/* date field ("data" is Portuguese for date) */
	char hora[15];			/* time field ("hora" is Portuguese for time) */
	int z=0;			/* 0 until the first record has been processed */
	int count=0;			/* rows already emitted for the current user */
	int i;
	bool new_user;			/* true when the user or the IP changed */
	struct getwordstruct gwarea;
	longline line;
	struct userinfostruct *uinfo;

	ouser[0]='\0';
	ouser2[0]='\0';

	/* no intermediate log means nothing was downloaded: no report */
	snprintf(report_in,sizeof(report_in),"%s/sarg/download.log",TempDir);
	if(access(report_in, R_OK) != 0)
		return;

	snprintf(report,sizeof(report),"%s/download.html",outdirname);

	if((fp_in=MY_FOPEN(report_in,"r"))==NULL) {
		debuga(_("(download) Cannot open log file %s\n"),report_in);
		exit(EXIT_FAILURE);
	}

	if((fp_ou=MY_FOPEN(report,"w"))==NULL) {
		debuga(_("(download) Cannot open log file %s\n"),report);
		exit(EXIT_FAILURE);
	}

	/* page header: the depth argument selects how far up the "index" links
	   point (date-tree layouts nest the report deeper) */
	write_html_header(fp_ou,(IndexTree == INDEX_TREE_DATE) ? 3 : 1,_("Downloads"));
	fputs("<tr><td class=\"header_c\">",fp_ou);
	fprintf(fp_ou,_("Period: %s"),period.html);
	fputs("</td></tr>\n",fp_ou);
	fprintf(fp_ou,"<tr><th class=\"header_c\">%s</th></tr>\n",_("Downloads"));
	close_html_header(fp_ou);

	fputs("<div class=\"report\"><table cellpadding=\"0\" cellspacing=\"2\">\n",fp_ou);
	fprintf(fp_ou,"<tr><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th></tr>\n",_("USERID"),_("IP/NAME"),_("DATE/TIME"),_("ACCESSED SITE"));

	if ((line=longline_create())==NULL) {
		debuga(_("Not enough memory to read the downloaded files\n"));
		exit(EXIT_FAILURE);
	}

	while((buf=longline_read(fp_in,line))!=NULL) {
		/* split the tab separated record; the URL is taken as a pointer
		   into the line buffer so it may be arbitrarily long */
		getword_start(&gwarea,buf);
		if (getword(data,sizeof(data),&gwarea,'\t')<0 || getword(hora,sizeof(hora),&gwarea,'\t')<0 ||
		    getword(user,sizeof(user),&gwarea,'\t')<0 || getword(ip,sizeof(ip),&gwarea,'\t')<0) {
			debuga(_("There is a broken record or garbage in file %s\n"),report_in);
			exit(EXIT_FAILURE);
		}
		if (getword_ptr(buf,&url,&gwarea,'\t')<0) {
			debuga(_("There is a broken url in file %s\n"),report_in);
			exit(EXIT_FAILURE);
		}

		/* map the raw user ID to its display label */
		uinfo=userinfo_find_from_id(user);
		if (!uinfo) {
			debuga(_("Unknown user ID %s in file %s\n"),user,report_in);
			exit(EXIT_FAILURE);
		}
		/* a record starts a new visual group when either the user or the
		   IP differs from the previous record (always true for the first) */
		new_user=false;
		if(!z) {
			strcpy(ouser,user);
			strcpy(oip,ip);
			z++;
			new_user=true;
		} else {
			if(strcmp(ouser,user) != 0) {
				strcpy(ouser,user);
				new_user=true;
			}
			if(strcmp(oip,ip) != 0) {
				strcpy(oip,ip);
				new_user=true;
			}
		}

		/* optionally cap the number of rows printed per user */
		if(DownloadReportLimit) {
			if(strcmp(ouser2,uinfo->label) == 0) {
				count++;
			} else {
				count=1;
				strcpy(ouser2,uinfo->label);
			}
			if(count >= DownloadReportLimit)
				continue;
		}

		/* strip trailing control characters (e.g. CR/LF) from the URL */
		for (i=strlen(url)-1 ; i>=0 && (unsigned char)url[i]<' ' ; i--) url[i]=0;

		fputs("<tr>",fp_ou);
		/* user and IP cells are left empty on continuation rows */
		if (new_user)
			fprintf(fp_ou,"<td class=\"data\">%s</td><td class=\"data\">%s</td>",uinfo->label,ip);
		else
			fputs("<td class=\"data\"></td><td class=\"data\"></td>",fp_ou);
		fprintf(fp_ou,"<td class=\"data\">%s-%s</td><td class=\"data2\">",data,hora);
		/* optional "block this URL" icon linking to the BlockIt CGI.
		   NOTE(review): the format string emits a double quote right after
		   "?url=" so the generated href looks malformed — confirm whether
		   the extra \" is intentional before touching it */
		if(BlockIt[0]!='\0') {
			fprintf(fp_ou,"<a href=\"%s%s?url=\"",wwwDocumentRoot,BlockIt);
			output_html_url(fp_ou,url);
			fprintf(fp_ou,"\"><img src=\"%s/sarg-squidguard-block.png\"></a>&nbsp;",ImageFile);
		}
		/* the stored URL has no scheme: prepend http:// for the link, and
		   truncate the visible text to 100 characters */
		fputs("<a href=\"http://",fp_ou);
		output_html_url(fp_ou,url);
		fputs("\">http://",fp_ou);
		output_html_string(fp_ou,url,100);
		fputs("</a></td></tr>\n",fp_ou);
	}
	fclose(fp_in);
	longline_destroy(&line);

	fputs("</table></div>\n",fp_ou);
	if (write_html_trailer(fp_ou)<0)
		debuga(_("Write error in file %s\n"),report);
	if (fclose(fp_ou)==EOF)
		debuga(_("Failed to close file %s - %s\n"),report,strerror(errno));

	/* the intermediate log is consumed: remove it */
	unlink(report_in);

	return;
}
169
170 void free_download(void)
171 {
172 if (DownloadSuffix) {
173 free(DownloadSuffix);
174 DownloadSuffix=NULL;
175 }
176 if (DownloadSuffixIndex) {
177 free(DownloadSuffixIndex);
178 DownloadSuffixIndex=NULL;
179 }
180 NDownloadSuffix=0;
181 }
182
183 void set_download_suffix(const char *list)
184 {
185 char *str;
186 int i, j, k;
187 int cmp;
188
189 free_download();
190
191 DownloadSuffix=strdup(list);
192 if (!DownloadSuffix) {
193 debuga(_("Download suffix list too long\n"));
194 exit(EXIT_FAILURE);
195 }
196 j = 1;
197 for (i=0 ; list[i] ; i++)
198 if (list[i] == ',') j++;
199 DownloadSuffixIndex=malloc(j*sizeof(char *));
200 if (!DownloadSuffixIndex) {
201 debuga(_("Too many download suffixes\n"));
202 exit(EXIT_FAILURE);
203 }
204
205 str = DownloadSuffix;
206 for (i=0 ; DownloadSuffix[i] ; i++) {
207 if (DownloadSuffix[i] == ',') {
208 DownloadSuffix[i] = '\0';
209 if (*str) {
210 cmp = -1;
211 for (j=0 ; j<NDownloadSuffix && (cmp=strcasecmp(str,DownloadSuffixIndex[j]))>0 ; j++);
212 if (cmp != 0) {
213 for (k=NDownloadSuffix ; k>j ; k--)
214 DownloadSuffixIndex[k]=DownloadSuffixIndex[k-1];
215 NDownloadSuffix++;
216 DownloadSuffixIndex[j]=str;
217 }
218 }
219 str=DownloadSuffix+i+1;
220 }
221 }
222
223 if (*str) {
224 cmp = -1;
225 for (j=0 ; j<NDownloadSuffix && (cmp=strcasecmp(str,DownloadSuffixIndex[j]))>0 ; j++);
226 if (cmp != 0) {
227 for (k=NDownloadSuffix ; k>j ; k--)
228 DownloadSuffixIndex[k]=DownloadSuffixIndex[k-1];
229 NDownloadSuffix++;
230 DownloadSuffixIndex[j]=str;
231 }
232 }
233 }
234
235 bool is_download_suffix(const char *url)
236 {
237 int urllen;
238 int i;
239 int down, up, center;
240 const char *suffix;
241 int cmp;
242 const int max_suffix=10;
243
244 if (DownloadSuffix == NULL || NDownloadSuffix == 0) return(false);
245
246 urllen=strlen(url)-1;
247 if (urllen<=0) return(false);
248 if (url[urllen] == '.') return(false); //reject a single trailing dot
249 for (i=0 ; i<urllen && (url[i]!='/' || url[i+1]=='/') && url[i]!='?' ; i++);
250 if (i>=urllen) return(false); // url is a hostname without any path or file to download
251
252 for (i=0 ; i<=max_suffix && i<urllen && url[urllen-i]!='.' ; i++)
253 if (url[urllen-i] == '/' || url[urllen-i] == '?') return(false);
254 if (i>max_suffix || i>=urllen) return(false);
255
256 suffix=url+urllen-i+1;
257 down=0;
258 up=NDownloadSuffix-1;
259 while (down<=up) {
260 center=(down+up)/2;
261 cmp=strcasecmp(suffix,DownloadSuffixIndex[center]);
262 if (cmp == 0) return(true);
263 if (cmp < 0)
264 up = center-1;
265 else
266 down = center+1;
267 }
268 return(false);
269 }
270