/* download.c — SARG (Squid Analysis Report Generator) download report module.
 * Upstream: http://sarg.sourceforge.net (mirrored at git.ipfire.org, thirdparty/sarg.git). */
2 * SARG Squid Analysis Report Generator http://sarg.sourceforge.net
6 * please look at http://sarg.sourceforge.net/donations.php
8 * http://sourceforge.net/projects/sarg/forums/forum/363374
9 * ---------------------------------------------------------------------
11 * This program is free software; you can redistribute it and/or modify
12 * it under the terms of the GNU General Public License as published by
13 * the Free Software Foundation; either version 2 of the License, or
14 * (at your option) any later version.
16 * This program is distributed in the hope that it will be useful,
17 * but WITHOUT ANY WARRANTY; without even the implied warranty of
18 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 * GNU General Public License for more details.
21 * You should have received a copy of the GNU General Public License
22 * along with this program; if not, write to the Free Software
23 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
27 #include "include/conf.h"
28 #include "include/defs.h"
/* Comma-separated download-suffix list: a private heap copy made by
 * set_download_suffix() (commas are later replaced by NULs); owned here,
 * released by free_download(). */
30 static char *DownloadSuffix
=NULL
;
/* Array of pointers into DownloadSuffix, one per suffix, kept sorted
 * case-insensitively so it can be searched later. Entries become dangling
 * once DownloadSuffix is freed. */
31 static char **DownloadSuffixIndex
=NULL
;
/* Number of valid entries in DownloadSuffixIndex. */
32 static int NDownloadSuffix
=0;
/*
 * download_report(): build the "Downloads" HTML report page.
 *
 * Reads the intermediate download log (%TempDir%/sarg/download.log), the
 * report period from the "sarg-period" file in the output directory, and
 * writes download.html listing user, IP, date/time and accessed site for
 * each download record.
 *
 * NOTE(review): this copy of the file is missing many original lines
 * (upstream numbers jump, e.g. 34->37, 64->67, 78->83, 111->114), so the
 * declarations of report, period, buf, data, hora, user, ip, url, ouser,
 * oip, ouser2, count and i, plus several braces and error-exit paths, are
 * not visible here — confirm against the upstream sarg sources before
 * editing this function.
 */
34 void download_report(void)
37 FILE *fp_in
= NULL
, *fp_ou
= NULL
;
41 char report_in
[MAXLEN
];
42 char wdirname
[MAXLEN
];
56 struct getwordstruct gwarea
;
57 struct longlinestruct line
;
58 struct userinfostruct
*uinfo
;
/* Path of the intermediate log produced by the main log parser. */
63 sprintf(report_in
,"%s/sarg/download.log",TempDir
);
/* Nothing to report when the intermediate log is absent/unreadable
 * (the early-return on the missing next line is not visible here). */
64 if(access(report_in
, R_OK
) != 0)
67 strcpy(wdirname
,outdirname
);
68 sprintf(report
,"%s/download.html",wdirname
);
/* Read the report period previously saved in <outdirname>/sarg-period. */
70 strcat(wdirname
,"sarg-period");
72 if ((fp_in
= fopen(wdirname
, "r")) == 0) {
73 debuga(_("(download) Cannot open file: %s\n"),wdirname
);
77 if (!fgets(period
,sizeof(period
),fp_in
)) {
78 debuga(_("(download) read error in %s\n"),wdirname
);
/* Reopen fp_in on the real download log (the fclose of the period file
 * is on a line missing from this copy — confirm upstream). */
83 if((fp_in
=MY_FOPEN(report_in
,"r"))==NULL
) {
84 fprintf(stderr
, "SARG: (download) %s: %s\n",_("Cannot open log file"),report_in
);
88 if((fp_ou
=MY_FOPEN(report
,"w"))==NULL
) {
89 fprintf(stderr
, "SARG: (download) %s: %s\n",_("Cannot open log file"),report
);
/* Page header: depth 3 for date-tree index layout, 1 otherwise. */
93 write_html_header(fp_ou
,(IndexTree
== INDEX_TREE_DATE
) ? 3 : 1,_("Downloads"));
94 fprintf(fp_ou
,"<tr><td class=\"header_l\">%s: %s</td></tr>\n",_("Period"),period
);
95 fprintf(fp_ou
,"<tr><th class=\"header_c\">%s</th></tr>\n",_("Downloads"));
96 close_html_header(fp_ou
);
98 fputs("<div class=\"report\"><table cellpadding=\"0\" cellspacing=\"2\">\n",fp_ou
);
99 fputs("<tr><td></td></tr>\n",fp_ou
);
/* Column headings of the report table. */
100 fprintf(fp_ou
,"<tr><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th><th class=\"header_l\">%s</th></tr>\n",_("USERID"),_("IP/NAME"),_("DATE/TIME"),_("ACCESSED SITE"));
102 if (longline_prepare(&line
)<0) {
103 debuga(_("Not enough memory to read the downloaded files\n"));
/* Each record is one tab-separated line: date, time, user id, ip, url. */
107 while((buf
=longline_read(fp_in
,&line
))!=NULL
) {
108 getword_start(&gwarea
,buf
);
109 if (getword(data
,sizeof(data
),&gwarea
,'\t')<0 || getword(hora
,sizeof(hora
),&gwarea
,'\t')<0 ||
110 getword(user
,sizeof(user
),&gwarea
,'\t')<0 || getword(ip
,sizeof(ip
),&gwarea
,'\t')<0) {
111 debuga(_("There is a broken record or garbage in file %s\n"),report_in
);
/* url is a pointer into buf (not copied), so it is only valid until the
 * next longline_read(). */
114 if (getword_ptr(buf
,&url
,&gwarea
,'\t')<0) {
115 debuga(_("There is a broken url in file %s\n"),report_in
);
119 uinfo
=userinfo_find_from_id(user
);
121 debuga(_("Unknown user ID %s in file %s\n"),user
,report_in
);
/* ouser/oip track the previously printed user/ip so repeated values are
 * blanked in the output (the assignments are on lines missing here). */
131 if(strcmp(ouser
,user
) != 0) {
135 if(strcmp(oip
,ip
) != 0) {
/* Optional per-user row cap (DownloadReportLimit); count maintenance is
 * on lines missing from this copy. */
141 if(DownloadReportLimit
) {
142 if(strcmp(ouser2
,uinfo
->label
) == 0) {
146 strcpy(ouser2
,uinfo
->label
);
148 if(count
>= DownloadReportLimit
)
/* Strip trailing control characters (e.g. CR/LF) from the URL in place. */
152 for (i
=strlen(url
)-1 ; i
>=0 && (unsigned char)url
[i
]<' ' ; i
--) url
[i
]=0;
156 fprintf(fp_ou
,"<td class=\"data\">%s</td><td class=\"data\">%s</td>",uinfo
->label
,ip
);
158 fputs("<td class=\"data\"></td><td class=\"data\"></td>",fp_ou
);
159 fprintf(fp_ou
,"<td class=\"data\">%s-%s</td><td class=\"data2\">",data
,hora
);
/* When BlockIt is configured, prepend a link/icon that submits the URL
 * to the blocking CGI (squidGuard-style). */
160 if(BlockIt
[0]!='\0') {
161 fprintf(fp_ou
,"<a href=\"%s%s?url=\"",wwwDocumentRoot
,BlockIt
);
162 output_html_url(fp_ou
,url
);
163 fprintf(fp_ou
,"\"><img src=\"%s/sarg-squidguard-block.png\"></a> ",ImageFile
);
/* Clickable URL, HTML-escaped and truncated to 100 chars for display. */
165 fputs("<a href=\"http://",fp_ou
);
166 output_html_url(fp_ou
,url
);
167 fputs("\">http://",fp_ou
);
168 output_html_string(fp_ou
,url
,100);
169 fputs("</a></td></tr>\n",fp_ou
);
/* Cleanup and page trailer (fclose calls are on lines missing here —
 * confirm upstream that both fp_in and fp_ou are closed). */
172 longline_free(&line
);
174 fputs("</table></div>\n",fp_ou
);
175 write_html_trailer(fp_ou
);
/*
 * free_download(): release the storage owned by the download-suffix list.
 *
 * NOTE(review): lines 187-188 and 192+ are missing from this copy; the
 * upstream code presumably also sets DownloadSuffix=NULL and resets
 * NDownloadSuffix — confirm before relying on the post-call state.
 */
183 void free_download(void)
185 if (DownloadSuffix
) {
/* DownloadSuffixIndex entries point into DownloadSuffix, so freeing the
 * string also invalidates every index entry. */
186 free(DownloadSuffix
);
189 if (DownloadSuffixIndex
) {
190 free(DownloadSuffixIndex
);
191 DownloadSuffixIndex
=NULL
;
/*
 * set_download_suffix(): take a comma-separated suffix list (e.g.
 * "zip,gz,iso"), keep a private writable copy in DownloadSuffix, split it
 * on commas in place, and build DownloadSuffixIndex — an array of
 * pointers to each suffix kept in case-insensitive sorted order via
 * insertion sort (duplicate handling and NDownloadSuffix increments are
 * on lines missing from this copy).
 *
 * NOTE(review): this copy is missing upstream lines 197-203, 207-209,
 * 215-217, 222-223, 225, 228, 230-231, 233-237, 239, 242, 244+, which
 * include the declarations/initialisation of i, j, k, cmp and str and
 * the early returns after the error messages — confirm against upstream
 * before editing.
 */
196 void set_download_suffix(const char *list
)
/* Private writable copy: commas are overwritten with NULs below. */
204 DownloadSuffix
=strdup(list
);
205 if (!DownloadSuffix
) {
206 fprintf(stderr
,"SARG: Download suffix list too long\n");
/* Size the index from the comma count; j's initial value is on a missing
 * line (presumably 1, to account for the final entry) — TODO confirm. */
210 for (i
=0 ; list
[i
] ; i
++)
211 if (list
[i
] == ',') j
++;
212 DownloadSuffixIndex
=malloc(j
*sizeof(char *));
213 if (!DownloadSuffixIndex
) {
214 fprintf(stderr
,"SARG: Too many download suffixes\n");
/* Walk the copy: terminate each suffix at its comma and insert it into
 * the sorted index. */
218 str
= DownloadSuffix
;
219 for (i
=0 ; DownloadSuffix
[i
] ; i
++) {
220 if (DownloadSuffix
[i
] == ',') {
221 DownloadSuffix
[i
] = '\0';
/* Find the case-insensitive insertion position for str. */
224 for (j
=0 ; j
<NDownloadSuffix
&& (cmp
=strcasecmp(str
,DownloadSuffixIndex
[j
]))>0 ; j
++);
/* Shift larger entries up one slot to make room. */
226 for (k
=NDownloadSuffix
; k
>j
; k
--)
227 DownloadSuffixIndex
[k
]=DownloadSuffixIndex
[k
-1];
229 DownloadSuffixIndex
[j
]=str
;
/* Next suffix starts just past the comma. */
232 str
=DownloadSuffix
+i
+1;
/* Insert the final (or only) suffix, which has no trailing comma. */
238 for (j
=0 ; j
<NDownloadSuffix
&& (cmp
=strcasecmp(str
,DownloadSuffixIndex
[j
]))>0 ; j
++);
240 for (k
=NDownloadSuffix
; k
>j
; k
--)
241 DownloadSuffixIndex
[k
]=DownloadSuffixIndex
[k
-1];
243 DownloadSuffixIndex
[j
]=str
;
248 int is_download_suffix(const char *url
)
252 int down
, up
, center
;
255 const int max_suffix
=10;
257 if (DownloadSuffix
== NULL
|| NDownloadSuffix
== 0) return(0);
259 urllen
=strlen(url
)-1;
260 if (urllen
<=0) return(0);
261 if (url
[urllen
] == '.') return(0); //reject a single trailing dot
262 for (i
=0 ; i
<urllen
&& (url
[i
]!='/' || url
[i
+1]=='/') && url
[i
]!='?' ; i
++);
263 if (i
>=urllen
) return(0); // url is a hostname without any path or file to download
265 for (i
=0 ; i
<=max_suffix
&& i
<urllen
&& url
[urllen
-i
]!='.' ; i
++)
266 if (url
[urllen
-i
] == '/' || url
[urllen
-i
] == '?') return(0);
267 if (i
>max_suffix
|| i
>=urllen
) return(0);
269 suffix
=url
+urllen
-i
+1;
271 up
=NDownloadSuffix
-1;
274 cmp
=strcasecmp(suffix
,DownloadSuffixIndex
[center
]);
275 if (cmp
== 0) return(1);