mirror of
https://github.com/darold/squidanalyzer.git
synced 2025-07-31 01:44:29 +02:00
Add WriteDelay configuration directive (defaults to 3600 - stores one hour of statistics from the log file) to be able to save resources on huge log files. On small log files it is not a good idea to set a lower value, as SquidAnalyzer will spend its time writing to and reading from disk.
This commit is contained in:
parent
0d0a131dd8
commit
66d6d91bff
@ -416,6 +416,7 @@ sub _init
|
|||||||
$self->{SiblingHit} = $options{SiblingHit} || 1;
|
$self->{SiblingHit} = $options{SiblingHit} || 1;
|
||||||
$self->{ImgFormat} = $options{ImgFormat} || 'png';
|
$self->{ImgFormat} = $options{ImgFormat} || 'png';
|
||||||
$self->{Locale} = $options{Locale} || '';
|
$self->{Locale} = $options{Locale} || '';
|
||||||
|
$self->{WriteDelay} = $options{WriteDelay} || 3600;
|
||||||
if ($self->{Lang}) {
|
if ($self->{Lang}) {
|
||||||
open(IN, "$self->{Lang}") or die "ERROR: can't open translation file $self->{Lang}, $!\n";
|
open(IN, "$self->{Lang}") or die "ERROR: can't open translation file $self->{Lang}, $!\n";
|
||||||
while (my $l = <IN>) {
|
while (my $l = <IN>) {
|
||||||
@ -605,7 +606,7 @@ sub _parseData
|
|||||||
}
|
}
|
||||||
|
|
||||||
# Store data when hour change to save memory
|
# Store data when hour change to save memory
|
||||||
if ($self->{tmp_saving} && ($self->{tmp_saving} ne $hour) ) {
|
if ($self->{tmp_saving} && ($time > ($self->{tmp_saving} + $self->{WriteDelay})) ) {
|
||||||
$date =~ /^(\d{4})(\d{2})(\d{2})$/;
|
$date =~ /^(\d{4})(\d{2})(\d{2})$/;
|
||||||
# If the day has changed then we want to save stats of the previous one
|
# If the day has changed then we want to save stats of the previous one
|
||||||
$self->_save_data("$1", "$2", "$3");
|
$self->_save_data("$1", "$2", "$3");
|
||||||
@ -617,7 +618,7 @@ sub _parseData
|
|||||||
$self->{first_year} ||= $self->{last_year};
|
$self->{first_year} ||= $self->{last_year};
|
||||||
$self->{first_month} ||= $self->{last_month};
|
$self->{first_month} ||= $self->{last_month};
|
||||||
|
|
||||||
$self->{tmp_saving} = $hour;
|
$self->{tmp_saving} = $time;
|
||||||
|
|
||||||
#### Store client statistics
|
#### Store client statistics
|
||||||
$self->{stat_user_hour}{$id}{$hour}{hits}++;
|
$self->{stat_user_hour}{$id}{$hour}{hits}++;
|
||||||
|
@ -84,7 +84,15 @@ TransfertUnit BYTES
|
|||||||
MinPie 2
|
MinPie 2
|
||||||
|
|
||||||
# Set this to your locale to display generated date in your language. Default
|
# Set this to your locale to display generated date in your language. Default
|
||||||
# is to use the current locale of the system. If you want date in German for
|
# is to use strftime. If you want date in German for example, set it to de_DE.
|
||||||
# example, set it to de_DE. For french, fr_FR should do the work.
|
# For french, fr_FR should do the work.
|
||||||
#Locale en_US
|
#Locale en_US
|
||||||
|
|
||||||
|
# By default SquidAnalyzer is saving current collected statistics each time
|
||||||
|
# a new hour is found in the log file. Most of the time this is enough, but if
|
||||||
|
# you have a huge log file and don't have enough memory, this will slow down the
|
||||||
|
# parser by forcing Perl to use temporary files. Use a lower value according to
|
||||||
|
# your memory and the size of your log file; on very huge log files with lots of
|
||||||
|
# requests/second a value of 30 minutes (1800) or less should help.
|
||||||
|
WriteDelay 3600
|
||||||
|
|
||||||
|
Loading…
x
Reference in New Issue
Block a user