]>
git.ipfire.org Git - thirdparty/sarg.git/blob - download.c
2 * SARG Squid Analysis Report Generator http://sarg.sourceforge.net
6 * please look at http://sarg.sourceforge.net/donations.php
8 * http://sourceforge.net/projects/sarg/forums/forum/363374
9 * ---------------------------------------------------------------------
11 * This program is free software; you can redistribute it and/or modify
12 * it under the terms of the GNU General Public License as published by
13 * the Free Software Foundation; either version 2 of the License, or
14 * (at your option) any later version.
16 * This program is distributed in the hope that it will be useful,
17 * but WITHOUT ANY WARRANTY; without even the implied warranty of
18 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 * GNU General Public License for more details.
21 * You should have received a copy of the GNU General Public License
22 * along with this program; if not, write to the Free Software
23 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111, USA.
27 #include "include/conf.h"
28 #include "include/defs.h"
/* NOTE(review): this file is a gitweb scrape; the leading number on each
 * statement is the original source line number, and some lines are missing. */
/* Owned, writable copy of the configured comma-separated suffix list
 * (allocated by strdup in set_download_suffix). The commas are later
 * overwritten with '\0', so this buffer is also the backing storage for
 * the pointer index below. */
30 /*@null@*/static char *DownloadSuffix
=NULL
;
/* Array of pointers into DownloadSuffix, one per suffix, kept sorted
 * case-insensitively (strcasecmp) so is_download_suffix can search it. */
31 /*@null@*/static char **DownloadSuffixIndex
=NULL
;
/* Number of valid entries currently stored in DownloadSuffixIndex. */
32 static int NDownloadSuffix
=0;
/*
 * download_report(): write the "Downloads" HTML report.
 *
 * Reads the tab-separated intermediate log <TempDir>/sarg/download.log
 * (fields: date, time, user id, ip, url), resolves each user id with
 * userinfo_find_from_id(), and emits <outdirname>/download.html using the
 * shared write_html_header()/close_html_header()/write_html_trailer()
 * helpers.
 *
 * NOTE(review): partial gitweb scrape — several original lines
 * (declarations such as report/data/hora/user/ip/url/ouser/oip/ouser2/
 * count, braces, and continue/return statements) fall on missing lines,
 * so this fragment does not balance syntactically.
 */
34 void download_report(void)
37 FILE *fp_in
= NULL
, *fp_ou
= NULL
;
41 char report_in
[MAXLEN
];
54 struct getwordstruct gwarea
;
56 struct userinfostruct
*uinfo
;
/* Intermediate log produced earlier in this run. */
61 snprintf(report_in
,sizeof(report_in
),"%s/sarg/download.log",TempDir
);
/* Nothing to report when the intermediate log is absent or unreadable. */
62 if(access(report_in
, R_OK
) != 0)
/* Destination page inside the per-period output directory. */
65 snprintf(report
,sizeof(report
),"%s/download.html",outdirname
);
67 if((fp_in
=MY_FOPEN(report_in
,"r"))==NULL
) {
68 debuga(_("(download) Cannot open log file %s\n"),report_in
);
72 if((fp_ou
=MY_FOPEN(report
,"w"))==NULL
) {
73 debuga(_("(download) Cannot open log file %s\n"),report
);
/* HTML preamble: title depth differs for date-tree index layout,
 * then the report period and the table column headers. */
77 write_html_header(fp_ou
,(IndexTree
== INDEX_TREE_DATE
) ? 3 : 1,_("Downloads"));
78 fputs("<tr><td class=\"header_c\">",fp_ou
);
79 fprintf(fp_ou
,_("Period: %s"),period
.html
);
80 fputs("</td></tr>\n",fp_ou
);
81 fprintf(fp_ou
,"<tr><th class=\"header_c\">%s</th></tr>\n",_("Downloads"));
82 close_html_header(fp_ou
);
84 fputs("<div class=\"report\"><table cellpadding=\"0\" cellspacing=\"2\">\n",fp_ou
);
85 fprintf(fp_ou
,"<tr><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th></tr>\n",_("USERID"),_("IP/NAME"),_("DATE/TIME"),_("ACCESSED SITE"));
/* longline handles arbitrarily long log lines. */
87 if ((line
=longline_create())==NULL
) {
88 debuga(_("Not enough memory to read the downloaded files\n"));
/* One record per line: date \t time \t user \t ip \t url. */
92 while((buf
=longline_read(fp_in
,line
))!=NULL
) {
93 getword_start(&gwarea
,buf
);
94 if (getword(data
,sizeof(data
),&gwarea
,'\t')<0 || getword(hora
,sizeof(hora
),&gwarea
,'\t')<0 ||
95 getword(user
,sizeof(user
),&gwarea
,'\t')<0 || getword(ip
,sizeof(ip
),&gwarea
,'\t')<0) {
96 debuga(_("There is a broken record or garbage in file %s\n"),report_in
);
/* The URL is taken by pointer into buf (no copy, no length limit). */
99 if (getword_ptr(buf
,&url
,&gwarea
,'\t')<0) {
100 debuga(_("There is a broken url in file %s\n"),report_in
);
104 uinfo
=userinfo_find_from_id(user
);
106 debuga(_("Unknown user ID %s in file %s\n"),user
,report_in
);
/* Detect user/ip changes between consecutive records; the actions taken
 * on change are on missing lines (presumably resetting the per-user
 * repetition state — confirm against the full source). */
116 if(strcmp(ouser
,user
) != 0) {
120 if(strcmp(oip
,ip
) != 0) {
/* Optional cap on the number of rows reported per user. */
126 if(DownloadReportLimit
) {
127 if(strcmp(ouser2
,uinfo
->label
) == 0) {
131 strcpy(ouser2
,uinfo
->label
);
133 if(count
>= DownloadReportLimit
)
/* Strip trailing control characters (CR/LF etc.) from the URL. */
137 for (i
=strlen(url
)-1 ; i
>=0 && (unsigned char)url
[i
]<' ' ; i
--) url
[i
]=0;
/* Emit one table row: user label + ip (or blanks — the condition
 * selecting between lines 141 and 143 is on a missing line). */
141 fprintf(fp_ou
,"<td class=\"data\">%s</td><td class=\"data\">%s</td>",uinfo
->label
,ip
);
143 fputs("<td class=\"data\"></td><td class=\"data\"></td>",fp_ou
);
144 fprintf(fp_ou
,"<td class=\"data\">%s-%s</td><td class=\"data2\">",data
,hora
);
/* If a BlockIt CGI is configured, prepend a squidGuard "block this
 * URL" icon linking to it. */
145 if(BlockIt
[0]!='\0') {
146 fprintf(fp_ou
,"<a href=\"%s%s?url=\"",wwwDocumentRoot
,BlockIt
);
147 output_html_url(fp_ou
,url
);
148 fprintf(fp_ou
,"\"><img src=\"%s/sarg-squidguard-block.png\"></a> ",ImageFile
);
/* Clickable link to the accessed site; text truncated to 100 chars. */
150 fputs("<a href=\"http://",fp_ou
);
151 output_html_url(fp_ou
,url
);
152 fputs("\">http://",fp_ou
);
153 output_html_string(fp_ou
,url
,100);
154 fputs("</a></td></tr>\n",fp_ou
);
/* Cleanup: free the line buffer, close the table and the page. */
157 longline_destroy(&line
);
159 fputs("</table></div>\n",fp_ou
);
160 if (write_html_trailer(fp_ou
)<0)
161 debuga(_("Write error in file %s\n"),report
);
162 if (fclose(fp_ou
)==EOF
)
163 debuga(_("Failed to close file %s - %s\n"),report
,strerror(errno
));
/*
 * free_download(): release the download-suffix list and its pointer index.
 * NOTE(review): partial gitweb scrape — the closing braces and,
 * presumably, a DownloadSuffix=NULL assignment (original line 174) fall
 * on missing lines; NDownloadSuffix is likewise expected to be reset
 * elsewhere — confirm against the full source.
 * The NULL guards are redundant (free(NULL) is a no-op) but harmless.
 */
170 void free_download(void)
172 if (DownloadSuffix
) {
173 free(DownloadSuffix
);
176 if (DownloadSuffixIndex
) {
177 free(DownloadSuffixIndex
);
178 DownloadSuffixIndex
=NULL
;
/*
 * set_download_suffix(): build the sorted suffix table from a
 * comma-separated list.
 *
 * Duplicates the caller's list (strdup), counts the commas to size the
 * pointer index, then walks the copy replacing each ',' with '\0' and
 * insertion-sorting a pointer to each suffix into DownloadSuffixIndex
 * using case-insensitive order (strcasecmp). The final suffix (after the
 * last comma) is inserted by the duplicated code after the loop.
 *
 * NOTE(review): partial gitweb scrape — the declarations/initializations
 * of i, j, k, cmp and str are on missing lines. j is presumably
 * initialized to 1 before the comma count (n commas => n+1 suffixes);
 * cmp is computed but its use (skipping duplicate suffixes when cmp==0,
 * original lines ~212/226) is also on missing lines, as are the
 * NDownloadSuffix increments — confirm against the full source.
 */
183 void set_download_suffix(const char *list
)
191 DownloadSuffix
=strdup(list
);
192 if (!DownloadSuffix
) {
193 debuga(_("Download suffix list too long\n"));
/* Count separators to size the index. */
197 for (i
=0 ; list
[i
] ; i
++)
198 if (list
[i
] == ',') j
++;
199 DownloadSuffixIndex
=malloc(j
*sizeof(char *));
200 if (!DownloadSuffixIndex
) {
201 debuga(_("Too many download suffixes\n"));
/* str points at the start of the suffix currently being scanned. */
205 str
= DownloadSuffix
;
206 for (i
=0 ; DownloadSuffix
[i
] ; i
++) {
207 if (DownloadSuffix
[i
] == ',') {
/* Terminate the current suffix in place. */
208 DownloadSuffix
[i
] = '\0';
/* Find its sorted position (case-insensitive)... */
211 for (j
=0 ; j
<NDownloadSuffix
&& (cmp
=strcasecmp(str
,DownloadSuffixIndex
[j
]))>0 ; j
++);
/* ...shift the tail up and insert the pointer. */
213 for (k
=NDownloadSuffix
; k
>j
; k
--)
214 DownloadSuffixIndex
[k
]=DownloadSuffixIndex
[k
-1];
216 DownloadSuffixIndex
[j
]=str
;
/* Advance past the '\0' that replaced the comma. */
219 str
=DownloadSuffix
+i
+1;
/* Insert the final suffix (no trailing comma) the same way. */
225 for (j
=0 ; j
<NDownloadSuffix
&& (cmp
=strcasecmp(str
,DownloadSuffixIndex
[j
]))>0 ; j
++);
227 for (k
=NDownloadSuffix
; k
>j
; k
--)
228 DownloadSuffixIndex
[k
]=DownloadSuffixIndex
[k
-1];
230 DownloadSuffixIndex
[j
]=str
;
/*
 * is_download_suffix(): return true when url ends in one of the
 * configured download suffixes.
 *
 * Extracts the file extension (the text after the last '.', at most
 * max_suffix=10 chars, with no '/' or '?' in between) and looks it up in
 * the sorted DownloadSuffixIndex table; the lookup is a binary search
 * (down/up/center with strcasecmp).
 *
 * NOTE(review): partial gitweb scrape — the declarations of i, urllen,
 * suffix and cmp, the down/center initializations, and the remainder of
 * the binary-search loop (original lines past 262) are cut off / on
 * missing lines; the function continues beyond this capture.
 */
235 bool is_download_suffix(const char *url
)
239 int down
, up
, center
;
/* Longest extension considered a "download" suffix. */
242 const int max_suffix
=10;
/* No configured suffixes => nothing can match. */
244 if (DownloadSuffix
== NULL
|| NDownloadSuffix
== 0) return(false);
/* urllen is the index of the last character of url. */
246 urllen
=strlen(url
)-1;
247 if (urllen
<=0) return(false);
248 if (url
[urllen
] == '.') return(false); //reject a single trailing dot
/* Require a path component: scan to the first single '/' (a "//" as in
 * "http://" is skipped) or '?'. */
249 for (i
=0 ; i
<urllen
&& (url
[i
]!='/' || url
[i
+1]=='/') && url
[i
]!='?' ; i
++);
250 if (i
>=urllen
) return(false); // url is a hostname without any path or file to download
/* Walk backwards looking for the '.' that starts the extension; a '/'
 * or '?' first means there is no extension. */
252 for (i
=0 ; i
<=max_suffix
&& i
<urllen
&& url
[urllen
-i
]!='.' ; i
++)
253 if (url
[urllen
-i
] == '/' || url
[urllen
-i
] == '?') return(false);
254 if (i
>max_suffix
|| i
>=urllen
) return(false);
/* suffix points just past the '.'. */
256 suffix
=url
+urllen
-i
+1;
/* Binary search over the sorted suffix table. */
258 up
=NDownloadSuffix
-1;
261 cmp
=strcasecmp(suffix
,DownloadSuffixIndex
[center
]);
262 if (cmp
== 0) return(true);