Merge pull request #90 from ClusterLabs/anvil-tools-dev
Anvil tools dev
commit 67c73cdef5
@@ -216,9 +216,10 @@ sub archive_database

# Make sure I have sane values.
$anvil->data->{sys}{database}{archive}{compress} = 1 if not defined $anvil->data->{sys}{database}{archive}{compress};
$anvil->data->{sys}{database}{archive}{count} = 10000 if not defined $anvil->data->{sys}{database}{archive}{count};
$anvil->data->{sys}{database}{archive}{division} = 25000 if not defined $anvil->data->{sys}{database}{archive}{division};
$anvil->data->{sys}{database}{archive}{trigger} = 20000 if not defined $anvil->data->{sys}{database}{archive}{trigger};
$anvil->data->{sys}{database}{archive}{count} = 25000 if not defined $anvil->data->{sys}{database}{archive}{count};
$anvil->data->{sys}{database}{archive}{division} = 30000 if not defined $anvil->data->{sys}{database}{archive}{division};
$anvil->data->{sys}{database}{archive}{trigger} = 50000 if not defined $anvil->data->{sys}{database}{archive}{trigger};
$anvil->data->{sys}{database}{archive}{save_to_disk} = 0
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"sys::database::archive::compress" => $anvil->data->{sys}{database}{archive}{compress},
"sys::database::archive::count" => $anvil->data->{sys}{database}{archive}{count},
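As context for the new defaults above, here is a minimal, hedged sketch of how values like trigger, count and division could drive an archive pass. The semantics assumed here (trigger starts archiving, count is roughly what is left behind, division is the batch size) are an interpretation, and the script itself is not Anvil::Tools code:

#!/usr/bin/perl
# Hedged sketch, not Anvil::Tools code: one plausible way trigger, count and
# division interact, ASSUMING 'trigger' is the row count that starts archiving,
# 'count' is roughly how many rows are left in place, and 'division' is the
# per-batch size.
use strict;
use warnings;
use POSIX qw(ceil);

my $trigger  = 50000;    # sys::database::archive::trigger
my $keep     = 25000;    # sys::database::archive::count
my $division = 30000;    # sys::database::archive::division

my $rows = 120000;       # pretend result of a SELECT COUNT(*) on a history table
if ($rows > $trigger)
{
	my $to_archive = $rows - $keep;
	my $batches    = ceil($to_archive / $division);
	print "Would archive $to_archive rows in $batches batch(es) of up to $division rows.\n";
}
else
{
	print "Row count $rows is at or below the trigger ($trigger), so nothing to archive.\n";
}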
@@ -234,25 +235,6 @@ sub archive_database
"sys::database::archive::directory" => $anvil->data->{sys}{database}{archive}{directory},
}});
}
if (not -d $anvil->data->{sys}{database}{archive}{directory})
{
my $failed = $anvil->Storage->make_directory({
debug => $debug,
directory => $anvil->data->{sys}{database}{archive}{directory},
mode => "0700",
user => "root",
group => "root",
});
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { failed => $failed }});
if ($failed)
{
# No directory to archive into...
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, priority => "err", key => "error_0098", variables => {
directory => $anvil->data->{sys}{database}{archive}{directory},
}});
return("!!error!!");
}
}

# Make sure the numerical values are sane
if ($anvil->data->{sys}{database}{archive}{count} !~ /^\d+$/)
@@ -15609,10 +15591,36 @@ sub _archive_table

my $offset = $count - $records_per_loop;
my $loop = 0;
my $do_delete = 1;
for (1..$loops)
{
# We need to date stamp from the closest record to the offset.
$loop++;

# Are we archiving to disk?
$do_delete = 1;
if ($anvil->data->{sys}{database}{archive}{save_to_disk})
{
if (not -d $anvil->data->{sys}{database}{archive}{directory})
{
my $failed = $anvil->Storage->make_directory({
debug => $debug,
directory => $anvil->data->{sys}{database}{archive}{directory},
mode => "0700",
user => "root",
group => "root",
});
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { failed => $failed }});
if ($failed)
{
# No directory to archive into...
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, priority => "err", key => "error_0098", variables => {
directory => $anvil->data->{sys}{database}{archive}{directory},
}});
return("!!error!!");
}
}

my $sql_file = "
-- Dump created at: [".$anvil->Get->date_and_time()."]
SET statement_timeout = 0;
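The loop above walks backwards through the table one batch at a time. A standalone illustration of that offset arithmetic, with made-up values and an assumed formula for the number of loops; it is not taken from the Anvil code itself:

#!/usr/bin/perl
# Illustration only: the batch offset starts near the end of the record set and
# steps back by records_per_loop each pass, so every batch reads a distinct
# window. The way $loops is derived here is an assumption made to keep the
# example runnable.
use strict;
use warnings;
use POSIX qw(ceil);

my $count            = 100000;   # rows eligible for archiving (made up)
my $records_per_loop = 30000;    # sys::database::archive::division
my $loops            = ceil($count / $records_per_loop);

my $offset = $count - $records_per_loop;
for my $loop (1..$loops)
{
	my $window_start = $offset < 0 ? 0 : $offset;
	print "batch $loop: OFFSET $window_start LIMIT $records_per_loop\n";
	$offset -= $records_per_loop;
}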
@@ -15709,13 +15717,17 @@ COPY history.".$table." (";

if ($failed)
{
$do_delete = 0;
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 0, priority => "err", key => "error_0099", variables => {
file => $archive_file,
table => $table,
}});
last;
}
else
}

# Do Delete.
if ($do_delete)
{
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, key => "log_0283"});
$vacuum = 1;
@@ -15739,7 +15751,6 @@ COPY history.".$table." (";

$offset -= $records_per_loop;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { offset => $offset }});

}

$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { vacuum => $vacuum }});
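The $do_delete flag introduced in the hunks above only lets the DELETE run when the on-disk dump for the batch was written cleanly. A self-contained sketch of that guard; both helper subs are hypothetical stand-ins, not Anvil functions:

#!/usr/bin/perl
# Sketch of the delete-only-after-a-good-dump pattern added here. Both subs
# below are hypothetical placeholders, not part of Anvil::Tools.
use strict;
use warnings;

sub write_archive_file   { my ($path, $body) = @_; return 0; }                        # returns 1 on failure
sub delete_archived_rows { my ($table)       = @_; print "DELETE batch from $table\n"; }

my $table     = "example_history_table";
my $do_delete = 1;

my $failed = write_archive_file("/tmp/example.".$table.".out", "-- dump body");
if ($failed)
{
	# The dump is missing or incomplete; keep the rows so no data is lost.
	$do_delete = 0;
}
delete_archived_rows($table) if $do_delete;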
@@ -16101,8 +16112,10 @@ ORDER BY

# To avoid resyncs triggered by the differences that might occur if the row
# count changed slightly between counting both/all DBs, we won't resync
# until there's at least 10 rows different.
if ($difference > 10)
# until there's at least 10 rows different. The exception is the hosts file,
# as it needs to resync on a single line difference when adding peer Striker
# dashboards.
if (($table eq "hosts") or ($difference > 10))
{
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, priority => "alert", key => "log_0219", variables => {
missing => $difference,
@@ -76,10 +76,10 @@ feature::scancore::disable::preventative-live-migration = 0
# NOTE: If the archive directory doesn't exist, Anvil! will create it
# automatically the first time it is needed.
sys::database::archive::compress = 1
sys::database::archive::count = 10000
sys::database::archive::trigger = 50000
sys::database::archive::count = 25000
sys::database::archive::division = 30000
sys::database::archive::directory = /usr/local/anvil/archives/
sys::database::archive::division = 250000
sys::database::archive::trigger = 20000

# This puts a limit on how many queries (writes, generally) to make in a single batch transaction. This is
# useful when doing very large transactions, like resync'ing a large table, by limiting how long a given
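These 'a::b::c' style keys line up one-to-one with the nested hash the code reads, so sys::database::archive::trigger becomes $anvil->data->{sys}{database}{archive}{trigger}. A tiny sketch of that mapping; the split-on-'::' parsing is assumed from the naming parallel in this diff, not taken from Anvil's config loader:

#!/usr/bin/perl
# Assumed illustration of how an 'a::b::c::d'-style config key maps onto the
# nested hash keys used in archive_database(); this is not Anvil's actual
# config parser.
use strict;
use warnings;

my $data = {};
my %conf = (
	'sys::database::archive::trigger'  => 50000,
	'sys::database::archive::division' => 30000,
);

foreach my $key (sort keys %conf)
{
	my @parts = split /::/, $key;
	$data->{$parts[0]}{$parts[1]}{$parts[2]}{$parts[3]} = $conf{$key};
}

print "trigger  = ".$data->{sys}{database}{archive}{trigger}."\n";
print "division = ".$data->{sys}{database}{archive}{division}."\n";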
@@ -17,6 +17,9 @@
# - Write a script that runs in crontab at UTC 17:00 and sends an email if Scancore or anvil-daemon are disabled.
# - Examine limits in: https://www.freedesktop.org/software/systemd/man/systemd.exec.html#LimitCPU=
# - Write a background program that scans the BCN and uses OUI data to try and find / auto-configure PDUs and UPSes
# -
# - Increase DRBD's default timeout
# - Check for and enable persistent journald logging
#
# NOTE:
# - For later; 'reboot --force --force' immediately kills the OS, like disabling ACPI on EL6 and hitting the
@@ -375,7 +375,7 @@ sub process_rename_file
progress => 100,
message => "job_0140,!!file_path!".$old_file_path."!!",
});
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, key => "job_0140", variables => { old_file_path => $old_file_path }});
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, key => "job_0140", variables => { file_path => $old_file_path }});
$anvil->nice_exit({exit_code => 0});
}