/*
 * SARG Squid Analysis Report Generator      http://sarg.sourceforge.net
 * please look at http://sarg.sourceforge.net/donations.php
 * http://sourceforge.net/projects/sarg/forums/forum/363374
 * ---------------------------------------------------------------------
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111, USA.
 */
27 #include "include/conf.h"
28 #include "include/defs.h"
30 /*@null@*/static char *DownloadSuffix
=NULL
;
31 /*@null@*/static char **DownloadSuffixIndex
=NULL
;
32 static int NDownloadSuffix
=0;
34 void download_report(void)
36 FILE *fp_in
= NULL
, *fp_ou
= NULL
;
40 char report_in
[MAXLEN
];
54 struct getwordstruct gwarea
;
56 struct userinfostruct
*uinfo
;
62 snprintf(report_in
,sizeof(report_in
),"%s/download.log",tmp
);
63 if(access(report_in
, R_OK
) != 0)
66 snprintf(report
,sizeof(report
),"%s/download.html",outdirname
);
68 if((fp_in
=MY_FOPEN(report_in
,"r"))==NULL
) {
69 debuga(_("(download) Cannot open log file %s\n"),report_in
);
73 if((fp_ou
=MY_FOPEN(report
,"w"))==NULL
) {
74 debuga(_("(download) Cannot open log file %s\n"),report
);
78 write_html_header(fp_ou
,(IndexTree
== INDEX_TREE_DATE
) ? 3 : 1,_("Downloads"),HTML_JS_NONE
);
79 fputs("<tr><td class=\"header_c\">",fp_ou
);
80 fprintf(fp_ou
,_("Period: %s"),period
.html
);
81 fputs("</td></tr>\n",fp_ou
);
82 fprintf(fp_ou
,"<tr><th class=\"header_c\">%s</th></tr>\n",_("Downloads"));
83 close_html_header(fp_ou
);
85 fputs("<div class=\"report\"><table cellpadding=\"0\" cellspacing=\"2\">\n",fp_ou
);
86 fprintf(fp_ou
,"<tr><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th></tr>\n",_("USERID"),_("IP/NAME"),_("DATE/TIME"),_("ACCESSED SITE"));
88 if ((line
=longline_create())==NULL
) {
89 debuga(_("Not enough memory to read the downloaded files\n"));
93 while((buf
=longline_read(fp_in
,line
))!=NULL
) {
94 getword_start(&gwarea
,buf
);
95 if (getword(data
,sizeof(data
),&gwarea
,'\t')<0 || getword(hora
,sizeof(hora
),&gwarea
,'\t')<0 ||
96 getword(user
,sizeof(user
),&gwarea
,'\t')<0 || getword(ip
,sizeof(ip
),&gwarea
,'\t')<0) {
97 debuga(_("There is a broken record or garbage in file %s\n"),report_in
);
100 if (getword_ptr(buf
,&url
,&gwarea
,'\t')<0) {
101 debuga(_("There is a broken url in file %s\n"),report_in
);
104 if (sscanf(data
,"%d/%d/%d",&day
,&month
,&year
)!=3) continue;
105 computedate(year
,month
,day
,&t
);
106 strftime(data
,sizeof(data
),"%x",&t
);
108 uinfo
=userinfo_find_from_id(user
);
110 debuga(_("Unknown user ID %s in file %s\n"),user
,report_in
);
120 if(strcmp(ouser
,user
) != 0) {
124 if(strcmp(oip
,ip
) != 0) {
130 if(DownloadReportLimit
) {
131 if(strcmp(ouser2
,uinfo
->label
) == 0) {
135 strcpy(ouser2
,uinfo
->label
);
137 if(count
>= DownloadReportLimit
)
141 for (i
=strlen(url
)-1 ; i
>=0 && (unsigned char)url
[i
]<' ' ; i
--) url
[i
]=0;
145 fprintf(fp_ou
,"<td class=\"data\"><a href=\"%s/%s.html\">%s</a></td><td class=\"data\">%s</td>",uinfo
->filename
,uinfo
->filename
,uinfo
->label
,ip
);
147 fputs("<td class=\"data\"></td><td class=\"data\"></td>",fp_ou
);
148 fprintf(fp_ou
,"<td class=\"data\">%s-%s</td><td class=\"data2\">",data
,hora
);
149 if(BlockIt
[0]!='\0' && url
[0]!=ALIAS_PREFIX
) {
150 fprintf(fp_ou
,"<a href=\"%s%s?url=\"",wwwDocumentRoot
,BlockIt
);
151 output_html_url(fp_ou
,url
);
152 fprintf(fp_ou
,"\"><img src=\"%s/sarg-squidguard-block.png\"></a> ",ImageFile
);
154 output_html_link(fp_ou
,url
,100);
155 fputs("</td></tr>\n",fp_ou
);
158 longline_destroy(&line
);
160 fputs("</table></div>\n",fp_ou
);
161 if (write_html_trailer(fp_ou
)<0)
162 debuga(_("Write error in file %s\n"),report
);
163 if (fclose(fp_ou
)==EOF
)
164 debuga(_("Failed to close file %s - %s\n"),report
,strerror(errno
));
166 if (unlink(report_in
)) {
167 debuga(_("Cannot delete %s - %s\n"),report_in
,strerror(errno
));
174 void free_download(void)
176 if (DownloadSuffix
) {
177 free(DownloadSuffix
);
180 if (DownloadSuffixIndex
) {
181 free(DownloadSuffixIndex
);
182 DownloadSuffixIndex
=NULL
;
187 void set_download_suffix(const char *list
)
195 DownloadSuffix
=strdup(list
);
196 if (!DownloadSuffix
) {
197 debuga(_("Download suffix list too long\n"));
201 for (i
=0 ; list
[i
] ; i
++)
202 if (list
[i
] == ',') j
++;
203 DownloadSuffixIndex
=malloc(j
*sizeof(char *));
204 if (!DownloadSuffixIndex
) {
205 debuga(_("Too many download suffixes\n"));
209 str
= DownloadSuffix
;
210 for (i
=0 ; DownloadSuffix
[i
] ; i
++) {
211 if (DownloadSuffix
[i
] == ',') {
212 DownloadSuffix
[i
] = '\0';
215 for (j
=0 ; j
<NDownloadSuffix
&& (cmp
=strcasecmp(str
,DownloadSuffixIndex
[j
]))>0 ; j
++);
217 for (k
=NDownloadSuffix
; k
>j
; k
--)
218 DownloadSuffixIndex
[k
]=DownloadSuffixIndex
[k
-1];
220 DownloadSuffixIndex
[j
]=str
;
223 str
=DownloadSuffix
+i
+1;
229 for (j
=0 ; j
<NDownloadSuffix
&& (cmp
=strcasecmp(str
,DownloadSuffixIndex
[j
]))>0 ; j
++);
231 for (k
=NDownloadSuffix
; k
>j
; k
--)
232 DownloadSuffixIndex
[k
]=DownloadSuffixIndex
[k
-1];
234 DownloadSuffixIndex
[j
]=str
;
239 bool is_download_suffix(const char *url
)
243 int down
, up
, center
;
246 const int max_suffix
=10;
248 if (DownloadSuffix
== NULL
|| NDownloadSuffix
== 0) return(false);
250 urllen
=strlen(url
)-1;
251 if (urllen
<=0) return(false);
252 if (url
[urllen
] == '.') return(false); //reject a single trailing dot
253 for (i
=0 ; i
<urllen
&& (url
[i
]!='/' || url
[i
+1]=='/') && url
[i
]!='?' ; i
++);
254 if (i
>=urllen
) return(false); // url is a hostname without any path or file to download
256 for (i
=0 ; i
<=max_suffix
&& i
<urllen
&& url
[urllen
-i
]!='.' ; i
++)
257 if (url
[urllen
-i
] == '/' || url
[urllen
-i
] == '?') return(false);
258 if (i
>max_suffix
|| i
>=urllen
) return(false);
260 suffix
=url
+urllen
-i
+1;
262 up
=NDownloadSuffix
-1;
265 cmp
=strcasecmp(suffix
,DownloadSuffixIndex
[center
]);
266 if (cmp
== 0) return(true);