From: eldy <>
Date: Wed, 8 May 2002 16:19:39 +0000 (+0000)
Subject: no message
X-Git-Tag: AWSTATS_4_1_BETA~74
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=d5b32b72361d18de29c97af2f5502df939a786d8;p=thirdparty%2FAWStats.git

no message
---

diff --git a/wwwroot/cgi-bin/awstats.model.conf b/wwwroot/cgi-bin/awstats.model.conf
index afdcad3e..5e9b8216 100644
--- a/wwwroot/cgi-bin/awstats.model.conf
+++ b/wwwroot/cgi-bin/awstats.model.conf
@@ -316,7 +316,7 @@ NotPageList="css js class gif jpg jpeg png bmp"
 # By default, AWStats considers that records found in log file are successful
 # hits if HTTP code returned by server is a valid HTTP code (200 and 304).
 # Any other code are reported in HTTP error chart.
-# However in some specific environnement, with web server HTTP redirection,
+# However in some specific environment, with web server HTTP redirection,
 # you can choose to also accept other codes.
 # Example: "200 304 302 305"
 # Default: "200 304"
@@ -327,7 +327,7 @@ ValidHTTPCodes="200 304"
 # pages. This is primarily used to differentiate between the URLs of dynamic
 # pages. If set to 1, mypage.html?id=x and mypage.html?id=y are counted as
 # two different pages. Warning, when set to 1, memory required to run AWStats
-# is doubled.
+# is doubled. Try to avoid using this parameter on very large web sites.
 # Possible values:
 # 0 - URLs are cleaned from the query string (ie: "/mypage.html")
 # 1 - Full URL with query string is used (ie: "/mypage.html?x=y")
@@ -343,11 +343,12 @@ URLWithQuery=0
 WarningMessages=1
 
 # To help you to detect if your log format is good, AWStats report an error
-# if the first NbOfLinesForCorruptedLog lines have all a format that does not
+# if all the first NbOfLinesForCorruptedLog lines have a format that does not
 # match the LogFormat parameter.
 # However, some worm virus attack on your web server can result in a very high
 # number of corrupted lines in your log. So if you experience awstats stop
-# because of bad virus records, you can increase this parameter (very rare).
+# because of bad virus records at the beginning of your log file, you can
+# increase this parameter (very rare).
 # Default: 50
 #
 NbOfLinesForCorruptedLog=50
@@ -373,15 +374,16 @@ WrapperScript=""
 # OPTIONAL ACCURACY SETUP SECTION (Not required but increase AWStats features)
 #-----------------------------------------------------------------------------
 
-# You can change value for following option to increase AWStats capabilities
-# (but this reduce AWStats speed).
-# Possible values: 0, 1 or 2
+# You can change value for following option to disable/enable AWStats capabilities.
+# Possible values: 0, 1
+# Possible values: 0, 1 or 2 for LevelForRobotsDetection
 # Default: 1
+# Default: 2 for LevelForRobotsDetection
 #
-LevelForRobotsDetection=1	# 0 will increase AWStats speed by 1%.
-LevelForBrowsersDetection=1	# 0 disables Browsers detection. No speed gain.
-LevelForOSDetection=1		# 0 disables OS detection. No speed gain.
-LevelForRefererAnalyze=1	# 0 will increase AWStats speed by 6%.
+LevelForRobotsDetection=2	# 0 will increase AWStats speed by 1%.
+LevelForBrowsersDetection=1	# 0 disables Browsers detection. No speed gain.
+LevelForOSDetection=1		# 0 disables OS detection. No speed gain.
+LevelForRefererAnalyze=1	# 0 will increase AWStats speed by 6%.
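
(Usage note, not part of the patch.) With the new defaults above, a per-site configuration file could tune the accuracy switches as sketched here; the file name awstats.mysite.conf is only an example, the values and comments are taken from the hunk above, and the list1/list2 behaviour is shown in the awstats.pl hunk that follows.

# Hypothetical awstats.mysite.conf excerpt
LevelForRobotsDetection=2	# 2 scans robot ID lists "list1" and "list2", 1 scans only "list1"
LevelForBrowsersDetection=1	# 0 disables Browsers detection. No speed gain.
LevelForOSDetection=1		# 0 disables OS detection. No speed gain.
LevelForRefererAnalyze=0	# 0 skips referer analysis (about 6% faster, per the comment above)
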
diff --git a/wwwroot/cgi-bin/awstats.pl b/wwwroot/cgi-bin/awstats.pl
index 46c90af1..a2032e1d 100644
--- a/wwwroot/cgi-bin/awstats.pl
+++ b/wwwroot/cgi-bin/awstats.pl
@@ -85,8 +85,6 @@ $UpdateStats, $URLWithQuery)=
 (0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0);
 my ($AllowToUpdateStatsFromBrowser, $ArchiveLogRecords, $DetailedReportsOnNewWindows,
 $FirstDayOfWeek, $SaveDatabaseFilesWithPermissionsForEveryone,
-$LevelForRobotsDetection, $LevelForBrowsersDetection, $LevelForOSDetection,
-$LevelForSearchEnginesDetection, $LevelForKeywordsDetection, $LevelForRefererAnalyze,
 $ShowHeader, $ShowMenu, $ShowMonthDayStats, $ShowDaysOfWeekStats,
 $ShowHoursStats, $ShowDomainsStats, $ShowHostsStats, $ShowRobotsStats,
 $ShowSessionsStats, $ShowPagesStats, $ShowFileTypesStats,
@@ -94,7 +92,10 @@ $ShowBrowsersStats, $ShowOSStats, $ShowOriginStats, $ShowKeyphrasesStats,
 $ShowKeywordsStats, $ShowHTTPErrorsStats, $ShowFlagLinks, $ShowLinksOnUrl,
 $WarningMessages)=
-(1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1);
+(1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1);
+my ($LevelForRobotsDetection, $LevelForBrowsersDetection, $LevelForOSDetection, $LevelForRefererAnalyze,
+$LevelForSearchEnginesDetection, $LevelForKeywordsDetection)=
+(2,1,1,1,1,1);
 my ($ArchiveFileName, $DefaultFile, $HTMLHeadSection, $HTMLEndSection,
 $LinksToWhoIs, $LogFile, $LogFormat, $Logo, $LogoLink, $StyleSheet,
 $WrapperScript, $SiteDomain)=
 ("","","","","","","","","","","","");
@@ -121,7 +122,7 @@ my @HostAliases=();
 my @AllowAccessFromWebToFollowingAuthenticatedUsers=();
 my @OnlyFiles = my @SkipDNSLookupFor = my @SkipFiles = my @SkipHosts = ();
 my @DOWIndex=();
-my @RobotArrayList = my @RobotsSearchIDOrder = ();
+my @RobotsSearchIDOrder = ();
 #my @RobotsSearchIDOrder_list1=(); my @RobotsSearchIDOrder_list2=(); my @RobotsSearchIDOrder_list3=();
 #my @BrowsersSearchIDOrder = my @OSSearchIDOrder = my @SearchEnginesSearchIDOrder();
 #my @WordsToCleanSearchUrl=();
@@ -665,12 +666,12 @@ sub Read_Config_File {
 	if ($param =~ /^Expires/) { $Expires=$value; next; }
 	if ($param =~ /^WrapperScript/) { $WrapperScript=$value; next; }
 	# Read optional accuracy setup section
-	if ($param =~ /^LevelForRobotsDetection/) { $LevelForRobotsDetection=$value; next; }	# Not used yet
-	if ($param =~ /^LevelForBrowsersDetection/) { $LevelForBrowsersDetection=$value; next; }	# Not used yet
-	if ($param =~ /^LevelForOSDetection/) { $LevelForOSDetection=$value; next; }	# Not used yet
-	if ($param =~ /^LevelForSearchEnginesDetection/) { $LevelForSearchEnginesDetection=$value; next; }	# Not used yet
-	if ($param =~ /^LevelForRefererAnalyze/) { $LevelForRefererAnalyze=$value; next; }	# Not used yet
-	if ($param =~ /^LevelForKeywordsDetection/) { $LevelForKeywordsDetection=$value; next; }	# Not used yet
+	if ($param =~ /^LevelForRobotsDetection/) { $LevelForRobotsDetection=$value; next; }
+	if ($param =~ /^LevelForBrowsersDetection/) { $LevelForBrowsersDetection=$value; next; }
+	if ($param =~ /^LevelForOSDetection/) { $LevelForOSDetection=$value; next; }
+	if ($param =~ /^LevelForRefererAnalyze/) { $LevelForRefererAnalyze=$value; next; }
+	if ($param =~ /^LevelForSearchEnginesDetection/) { $LevelForSearchEnginesDetection=$value; next; }
+	if ($param =~ /^LevelForKeywordsDetection/) { $LevelForKeywordsDetection=$value; next; }
 	# Read optional appearance setup section
 	if ($param =~ /^Lang/) { $Lang=$value; next; }
 	if ($param =~ /^DirLang/) { $DirLang=$value; next; }
@@ -947,11 +948,11 @@ sub Check_Config {
 	if ($SplitSearchString !~ /[0-1]/) { $SplitSearchString=0; }
 	if ($Expires !~ /^[\d]+/) { $Expires=0; }
 	# Optional accuracy setup section
-	if ($LevelForRobotsDetection !~ /^[\d]+/) { $LevelForRobotsDetection=1; }
+	if ($LevelForRobotsDetection !~ /^[\d]+/) { $LevelForRobotsDetection=2; }
 	if ($LevelForBrowsersDetection !~ /^[\d]+/) { $LevelForBrowsersDetection=1; }
 	if ($LevelForOSDetection !~ /^[\d]+/) { $LevelForOSDetection=1; }
-	if ($LevelForSearchEnginesDetection !~ /^[\d]+/) { $LevelForSearchEnginesDetection=1; }
 	if ($LevelForRefererAnalyze !~ /^[\d]+/) { $LevelForRefererAnalyze=1; }
+	if ($LevelForSearchEnginesDetection !~ /^[\d]+/) { $LevelForSearchEnginesDetection=1; }
 	if ($LevelForKeywordsDetection !~ /^[\d]+/) { $LevelForKeywordsDetection=1; }
 	# Optional appearance setup section
 	if ($MaxRowsInHTMLOutput !~ /^[\d]+/ || $MaxRowsInHTMLOutput<1) { $MaxRowsInHTMLOutput=1000; }
@@ -2472,18 +2473,19 @@ if ((! $ENV{"GATEWAY_INTERFACE"}) && (! $SiteConfig)) {
 	print "\n";
 	print "Now supports/detects:\n";
 	print " Reverse DNS lookup\n";
-	print " Number of visits, unique visitors, list of last visits\n";
-	print " Hosts list and unresolved IP addresses list\n";
-	print " Days of week and rush hours\n";
+	print " Number of visits, number of unique visitors\n";
+	print " Visits duration and list of last visits\n";
 	print " Authenticated users\n";
-	print " Viewed and entry pages\n";
-	print " Type of files and Web compression\n";
+	print " Days of week and rush hours\n";
+	print " Hosts list and unresolved IP addresses list\n";
+	print " Most viewed, entry and exit pages\n";
+	print " Files type and Web compression\n";
 	print " ".(scalar keys %DomainsHashIDLib)." domains/countries\n";
 	print " ".(scalar keys %BrowsersHashIDLib)." browsers\n";
 	print " ".(scalar keys %OSHashLib)." operating systems\n";
 	print " ".(scalar keys %RobotsHashIDLib)." robots\n";
 	print " ".(scalar keys %SearchEnginesHashIDLib)." search engines (and keyphrases/keywords used from them)\n";
-	print " All HTTP errors\n";
+	print " All HTTP errors with last referrer\n";
 	print " Report by day/month/year\n";
 	print " And a lot of other advanced options...\n";
 	print "New versions and FAQ at http://awstats.sourceforge.net\n";
@@ -2610,9 +2612,10 @@ if ($Debug) { debug("UpdateStats is $UpdateStats",2); }
 
 if ($UpdateStats) {
 	# Init RobotsSearchIDOrder required for update process
-	push @RobotArrayList,"list1";
-	push @RobotArrayList,"list2";
-	push @RobotArrayList,"list3";
+	my @RobotArrayList;
+	if ($LevelForRobotsDetection >= 1) { push @RobotArrayList,"list1"; }
+	if ($LevelForRobotsDetection >= 2) { push @RobotArrayList,"list2"; }
+	if ($LevelForRobotsDetection >= 1) { push @RobotArrayList,"list3"; }	# Always added
 	foreach my $key (@RobotArrayList) {
 		push @RobotsSearchIDOrder,@{"RobotsSearchIDOrder_$key"};
 		if ($Debug) { debug("Add ".@{"RobotsSearchIDOrder_$key"}." elements from RobotsSearchIDOrder_$key into RobotsSearchIDOrder",2); }
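
(Illustration only, not part of the patch.) A self-contained Perl sketch of the robots-list selection the last awstats.pl hunk introduces: LevelForRobotsDetection decides which RobotsSearchIDOrder_list* arrays get merged into @RobotsSearchIDOrder before the update run. The list contents below are placeholders; the real script fills these arrays from its robots database and merges them through symbolic references keyed by the list name, and it guards "list3" with level >= 1 while commenting it as always added.

#!/usr/bin/perl
use strict;
use warnings;

my $LevelForRobotsDetection = 2;	# new default introduced by this commit

# Placeholder robot IDs standing in for RobotsSearchIDOrder_list1/2/3.
my %robot_lists = (
	list1 => [ 'googlebot', 'slurp' ],
	list2 => [ 'rarebot' ],
	list3 => [ 'no_user_agent' ],
);

# Same guards as the patched code: list1 when level >= 1, list2 when level >= 2,
# list3 guarded like list1 (the patch comments it "Always added").
my @RobotArrayList;
push @RobotArrayList, 'list1' if $LevelForRobotsDetection >= 1;
push @RobotArrayList, 'list2' if $LevelForRobotsDetection >= 2;
push @RobotArrayList, 'list3' if $LevelForRobotsDetection >= 1;

# Merge the selected lists, mirroring the foreach loop in the hunk above.
my @RobotsSearchIDOrder;
foreach my $key (@RobotArrayList) {
	push @RobotsSearchIDOrder, @{ $robot_lists{$key} };
	print "Add " . scalar(@{ $robot_lists{$key} }) . " elements from RobotsSearchIDOrder_$key\n";
}
print scalar(@RobotsSearchIDOrder) . " robot IDs will be scanned during the update\n";

With the default of 2 all three lists are merged; level 1 skips list2, and level 0 leaves the merged list empty, which effectively disables robots detection during the update.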