/*
 * SARG Squid Analysis Report Generator      http://sarg.sourceforge.net
 *                                                            1998, 2010
 *
 * SARG donations:
 *      please look at http://sarg.sourceforge.net/donations.php
 * Support:
 *      http://sourceforge.net/projects/sarg/forums/forum/363374
 * ---------------------------------------------------------------------
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111, USA.
 *
 */

#include "include/conf.h"
#include "include/defs.h"

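/* Sorted list of the suffixes that identify a download.
 * DownloadSuffix holds a private copy of the configured list,
 * DownloadSuffixIndex points at each suffix inside that copy in
 * case-insensitive sorted order and NDownloadSuffix counts them. */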
/*@null@*/static char *DownloadSuffix=NULL;
/*@null@*/static char **DownloadSuffixIndex=NULL;
static int NDownloadSuffix=0;

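/*
 * Generate the download report: read the pre-processed download records
 * from download.log in the temporary directory and write download.html
 * in the report directory, one row per downloaded file.
 */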
void download_report(void)
{

	FILE *fp_in = NULL, *fp_ou = NULL;

	char *buf;
	char *url;
	char report_in[MAXLEN];
	char report[MAXLEN];
	char ip[MAXLEN];
	char oip[MAXLEN];
	char user[MAXLEN];
	char ouser[MAXLEN];
	char ouser2[MAXLEN];
	char data[15];
	char hora[15];
	int z=0;
	int count=0;
	int i;
	int day,month,year;
	bool new_user;
	struct getwordstruct gwarea;
	longline line;
	struct userinfostruct *uinfo;
	struct tm t;

	ouser[0]='\0';
	ouser2[0]='\0';

	snprintf(report_in,sizeof(report_in),"%s/download.log",tmp);
	if(access(report_in, R_OK) != 0)
		return;

	snprintf(report,sizeof(report),"%s/download.html",outdirname);

	if((fp_in=MY_FOPEN(report_in,"r"))==NULL) {
		debuga(_("(download) Cannot open log file %s\n"),report_in);
		exit(EXIT_FAILURE);
	}

	if((fp_ou=MY_FOPEN(report,"w"))==NULL) {
		debuga(_("(download) Cannot open log file %s\n"),report);
		exit(EXIT_FAILURE);
	}

	write_html_header(fp_ou,(IndexTree == INDEX_TREE_DATE) ? 3 : 1,_("Downloads"),HTML_JS_NONE);
	fputs("<tr><td class=\"header_c\">",fp_ou);
	fprintf(fp_ou,_("Period: %s"),period.html);
	fputs("</td></tr>\n",fp_ou);
	fprintf(fp_ou,"<tr><th class=\"header_c\">%s</th></tr>\n",_("Downloads"));
	close_html_header(fp_ou);

	fputs("<div class=\"report\"><table cellpadding=\"0\" cellspacing=\"2\">\n",fp_ou);
	fprintf(fp_ou,"<tr><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th></tr>\n",_("USERID"),_("IP/NAME"),_("DATE/TIME"),_("ACCESSED SITE"));

	if ((line=longline_create())==NULL) {
		debuga(_("Not enough memory to read the downloaded files\n"));
		exit(EXIT_FAILURE);
	}

	while((buf=longline_read(fp_in,line))!=NULL) {
		getword_start(&gwarea,buf);
		if (getword(data,sizeof(data),&gwarea,'\t')<0 || getword(hora,sizeof(hora),&gwarea,'\t')<0 ||
		    getword(user,sizeof(user),&gwarea,'\t')<0 || getword(ip,sizeof(ip),&gwarea,'\t')<0) {
			debuga(_("There is a broken record or garbage in file %s\n"),report_in);
			exit(EXIT_FAILURE);
		}
		if (getword_ptr(buf,&url,&gwarea,'\t')<0) {
			debuga(_("There is a broken url in file %s\n"),report_in);
			exit(EXIT_FAILURE);
		}
		if (sscanf(data,"%d/%d/%d",&day,&month,&year)!=3) continue;
		computedate(year,month,day,&t);
		strftime(data,sizeof(data),"%x",&t);

		uinfo=userinfo_find_from_id(user);
		if (!uinfo) {
			debuga(_("Unknown user ID %s in file %s\n"),user,report_in);
			exit(EXIT_FAILURE);
		}
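		/* Print the user and IP columns only when the user or the IP
		 * changes with respect to the previous record. */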
		new_user=false;
		if(!z) {
			strcpy(ouser,user);
			strcpy(oip,ip);
			z++;
			new_user=true;
		} else {
			if(strcmp(ouser,user) != 0) {
				strcpy(ouser,user);
				new_user=true;
			}
			if(strcmp(oip,ip) != 0) {
				strcpy(oip,ip);
				new_user=true;
			}
		}

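		/* Limit the number of rows printed for each user when
		 * DownloadReportLimit is set. */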
		if(DownloadReportLimit) {
			if(strcmp(ouser2,uinfo->label) == 0) {
				count++;
			} else {
				count=1;
				strcpy(ouser2,uinfo->label);
			}
			if(count >= DownloadReportLimit)
				continue;
		}

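		/* Trim trailing control characters (e.g. CR/LF) from the URL. */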
		for (i=strlen(url)-1 ; i>=0 && (unsigned char)url[i]<' ' ; i--) url[i]=0;

		fputs("<tr>",fp_ou);
		if (new_user)
			fprintf(fp_ou,"<td class=\"data\">%s</td><td class=\"data\">%s</td>",uinfo->label,ip);
		else
			fputs("<td class=\"data\"></td><td class=\"data\"></td>",fp_ou);
		fprintf(fp_ou,"<td class=\"data\">%s-%s</td><td class=\"data2\">",data,hora);
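		/* When a block script is configured (BlockIt), add a link that lets
		 * the administrator submit this URL to it. */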
		if(BlockIt[0]!='\0') {
			fprintf(fp_ou,"<a href=\"%s%s?url=",wwwDocumentRoot,BlockIt);
			output_html_url(fp_ou,url);
			fprintf(fp_ou,"\"><img src=\"%s/sarg-squidguard-block.png\"></a>&nbsp;",ImageFile);
		}
		fputs("<a href=\"http://",fp_ou);
		output_html_url(fp_ou,url);
		fputs("\">http://",fp_ou);
		output_html_string(fp_ou,url,100);
		fputs("</a></td></tr>\n",fp_ou);
	}
	fclose(fp_in);
	longline_destroy(&line);

	fputs("</table></div>\n",fp_ou);
	if (write_html_trailer(fp_ou)<0)
		debuga(_("Write error in file %s\n"),report);
	if (fclose(fp_ou)==EOF)
		debuga(_("Failed to close file %s - %s\n"),report,strerror(errno));

	unlink(report_in);

	return;
}

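/*
 * Release the memory allocated for the download suffix list.
 */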
void free_download(void)
{
	if (DownloadSuffix) {
		free(DownloadSuffix);
		DownloadSuffix=NULL;
	}
	if (DownloadSuffixIndex) {
		free(DownloadSuffixIndex);
		DownloadSuffixIndex=NULL;
	}
	NDownloadSuffix=0;
}

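/*
 * Split the comma separated suffix list into a sorted, case-insensitive
 * index that is_download_suffix() can search with a binary search.
 */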
void set_download_suffix(const char *list)
{
	char *str;
	int i, j, k;
	int cmp;

	free_download();

	DownloadSuffix=strdup(list);
	if (!DownloadSuffix) {
		debuga(_("Download suffix list too long\n"));
		exit(EXIT_FAILURE);
	}
	j = 1;
	for (i=0 ; list[i] ; i++)
		if (list[i] == ',') j++;
	DownloadSuffixIndex=malloc(j*sizeof(char *));
	if (!DownloadSuffixIndex) {
		debuga(_("Too many download suffixes\n"));
		exit(EXIT_FAILURE);
	}

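	/* Split the copy in place at each comma and insertion sort every
	 * non-empty suffix into DownloadSuffixIndex, skipping duplicates. */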
	str = DownloadSuffix;
	for (i=0 ; DownloadSuffix[i] ; i++) {
		if (DownloadSuffix[i] == ',') {
			DownloadSuffix[i] = '\0';
			if (*str) {
				cmp = -1;
				for (j=0 ; j<NDownloadSuffix && (cmp=strcasecmp(str,DownloadSuffixIndex[j]))>0 ; j++);
				if (cmp != 0) {
					for (k=NDownloadSuffix ; k>j ; k--)
						DownloadSuffixIndex[k]=DownloadSuffixIndex[k-1];
					NDownloadSuffix++;
					DownloadSuffixIndex[j]=str;
				}
			}
			str=DownloadSuffix+i+1;
		}
	}

	if (*str) {
		cmp = -1;
		for (j=0 ; j<NDownloadSuffix && (cmp=strcasecmp(str,DownloadSuffixIndex[j]))>0 ; j++);
		if (cmp != 0) {
			for (k=NDownloadSuffix ; k>j ; k--)
				DownloadSuffixIndex[k]=DownloadSuffixIndex[k-1];
			NDownloadSuffix++;
			DownloadSuffixIndex[j]=str;
		}
	}
}

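/*
 * Tell whether the URL ends with one of the configured download suffixes.
 * The suffix is looked up with a binary search in the sorted index built
 * by set_download_suffix().
 */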
bool is_download_suffix(const char *url)
{
	int urllen;
	int i;
	int down, up, center;
	const char *suffix;
	int cmp;
	const int max_suffix=10;

	if (DownloadSuffix == NULL || NDownloadSuffix == 0) return(false);

	urllen=strlen(url)-1;
	if (urllen<=0) return(false);
	if (url[urllen] == '.') return(false); //reject a single trailing dot
	for (i=0 ; i<urllen && (url[i]!='/' || url[i+1]=='/') && url[i]!='?' ; i++);
	if (i>=urllen) return(false); // url is a hostname without any path or file to download

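	/* Scan backwards from the end of the URL for the dot introducing the
	 * suffix; give up if a '/' or '?' is met first or if the suffix would
	 * be longer than max_suffix characters. */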
	for (i=0 ; i<=max_suffix && i<urllen && url[urllen-i]!='.' ; i++)
		if (url[urllen-i] == '/' || url[urllen-i] == '?') return(false);
	if (i>max_suffix || i>=urllen) return(false);

	suffix=url+urllen-i+1;
	down=0;
	up=NDownloadSuffix-1;
	while (down<=up) {
		center=(down+up)/2;
		cmp=strcasecmp(suffix,DownloadSuffixIndex[center]);
		if (cmp == 0) return(true);
		if (cmp < 0)
			up = center-1;
		else
			down = center+1;
	}
	return(false);
}