Merge pull request #90 from ClusterLabs/anvil-tools-dev
Anvil tools dev
commit 67c73cdef5
@@ -215,10 +215,11 @@ sub archive_database
 }
 
 # Make sure I have sane values.
 $anvil->data->{sys}{database}{archive}{compress} = 1 if not defined $anvil->data->{sys}{database}{archive}{compress};
-$anvil->data->{sys}{database}{archive}{count} = 10000 if not defined $anvil->data->{sys}{database}{archive}{count};
-$anvil->data->{sys}{database}{archive}{division} = 25000 if not defined $anvil->data->{sys}{database}{archive}{division};
-$anvil->data->{sys}{database}{archive}{trigger} = 20000 if not defined $anvil->data->{sys}{database}{archive}{trigger};
+$anvil->data->{sys}{database}{archive}{count} = 25000 if not defined $anvil->data->{sys}{database}{archive}{count};
+$anvil->data->{sys}{database}{archive}{division} = 30000 if not defined $anvil->data->{sys}{database}{archive}{division};
+$anvil->data->{sys}{database}{archive}{trigger} = 50000 if not defined $anvil->data->{sys}{database}{archive}{trigger};
+$anvil->data->{sys}{database}{archive}{save_to_disk} = 0
 $anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
 "sys::database::archive::compress" => $anvil->data->{sys}{database}{archive}{compress},
 "sys::database::archive::count" => $anvil->data->{sys}{database}{archive}{count},
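These fallbacks only apply when the key is not already defined, so values read from anvil.conf (see the anvil.conf hunk further down) win over the in-code defaults, assuming the config file has been parsed into $anvil->data before this runs. A minimal standalone sketch of the same defaulting idiom, using a hypothetical %settings hash rather than the real $anvil->data structure:

#!/usr/bin/perl
use strict;
use warnings;

# Pretend this came from parsing anvil.conf; 'count' was set there, 'trigger' was not.
my %settings = (count => 25000);

# Fill in fallback values only where the config file didn't provide one.
$settings{count}   = 25000 if not defined $settings{count};
$settings{trigger} = 50000 if not defined $settings{trigger};

print "count=$settings{count}, trigger=$settings{trigger}\n";   # count=25000, trigger=50000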
@@ -234,25 +235,6 @@ sub archive_database
 "sys::database::archive::directory" => $anvil->data->{sys}{database}{archive}{directory},
 }});
 }
-if (not -d $anvil->data->{sys}{database}{archive}{directory})
-{
-my $failed = $anvil->Storage->make_directory({
-debug => $debug,
-directory => $anvil->data->{sys}{database}{archive}{directory},
-mode => "0700",
-user => "root",
-group => "root",
-});
-$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { failed => $failed }});
-if ($failed)
-{
-# No directory to archive into...
-$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, priority => "err", key => "error_0098", variables => {
-directory => $anvil->data->{sys}{database}{archive}{directory},
-}});
-return("!!error!!");
-}
-}
 
 # Make sure the numerical values are sane
 if ($anvil->data->{sys}{database}{archive}{count} !~ /^\d+$/)
@@ -15607,13 +15589,39 @@ sub _archive_table
 column_count => $column_count
 }});
 
 my $offset = $count - $records_per_loop;
 my $loop = 0;
+my $do_delete = 1;
 for (1..$loops)
 {
 # We need to date stamp from the closest record to the offset.
 $loop++;
-my $sql_file = "
+# Are we archiving to disk?
+$do_delete = 1;
+if ($anvil->data->{sys}{database}{archive}{save_to_disk})
+{
+if (not -d $anvil->data->{sys}{database}{archive}{directory})
+{
+my $failed = $anvil->Storage->make_directory({
+debug => $debug,
+directory => $anvil->data->{sys}{database}{archive}{directory},
+mode => "0700",
+user => "root",
+group => "root",
+});
+$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { failed => $failed }});
+if ($failed)
+{
+# No directory to archive into...
+$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, priority => "err", key => "error_0098", variables => {
+directory => $anvil->data->{sys}{database}{archive}{directory},
+}});
+return("!!error!!");
+}
+}
+
+my $sql_file = "
 -- Dump created at: [".$anvil->Get->date_and_time()."]
 SET statement_timeout = 0;
 SET lock_timeout = 0;
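With this change the filesystem is only touched when sys::database::archive::save_to_disk is set; the archive directory is then created on first use (mode 0700) and the archive attempt bails out if that fails. A rough standalone equivalent of that guard, using core File::Path instead of Anvil's Storage->make_directory wrapper and an example path (it skips the root ownership step the real call performs):

#!/usr/bin/perl
use strict;
use warnings;
use File::Path qw(make_path);

my $directory = "/usr/local/anvil/archives";   # example target, as in anvil.conf

# Create the archive directory on demand; give up on archiving to disk if it can't be made.
if (not -d $directory)
{
	make_path($directory, { mode => 0700, error => \my $errors });
	if (@{$errors})
	{
		warn "Failed to create the archive directory: [".$directory."]\n";
		exit(1);
	}
}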
@@ -15626,96 +15634,100 @@ SET client_min_messages = warning;
 SET row_security = off;
 
 COPY history.".$table." (";
 my $query = "SELECT modified_date FROM history.".$table." ORDER BY modified_date ASC OFFSET ".$offset." LIMIT 1;";
 $anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
 "s1:loop" => $loop,
 "s2:query" => $query,
 "s3:sql_file" => $sql_file,
-}});
-
-my $modified_date = $anvil->Database->query({uuid => $uuid, query => $query, source => $THIS_FILE, line => __LINE__})->[0]->[0];
-$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { modified_date => $modified_date }});
-
-# Build the query.
-$query = "SELECT ";
-foreach my $column (sort {$a cmp $b} @{$columns})
-{
-$sql_file .= $column->[0].", ";
-$query .= $column->[0].", ";
-}
-$sql_file .= "modified_date) FROM stdin;\n";
-$query .= "modified_date FROM history.".$table." WHERE modified_date >= '".$modified_date."' ORDER BY modified_date ASC;";
-$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
-sql_file => $sql_file,
-query => $query,
-}});
-my $results = $anvil->Database->query({uuid => $uuid, query => $query, source => $THIS_FILE, line => __LINE__});
-my $count = @{$results};
-$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
-results => $results,
-count => $count,
-}});
-
-foreach my $row (@{$results})
-{
-# Build the string.
-my $line = "";
-my $i = 0;
-foreach my $column (@{$columns})
-{
-my $value = defined $row->[$i] ? $row->[$i] : '\N';
-$i++;
-$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
-"s1:i" => $i,
-"s2:column" => $column,
-"s3:value" => $value,
-}});
-
-# We need to convert tabs and newlines into \t and \n
-$value =~ s/\t/\\t/g;
-$value =~ s/\n/\\n/g;
-$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { value => $value }});
-
-$line .= $value."\t";
-}
-# Add the modified_date column.
-$line .= $row->[$i]."\n";
-$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { line => $line }});
-
-$sql_file .= $line;
-}
-$sql_file .= "\\.\n\n";;
-$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { sql_file => $sql_file }});
-
-my $archive_file = $directory."/".$anvil->Database->get_host_from_uuid({short => 1, host_uuid => $uuid}).".".$table.".".$time_stamp.".".$loop.".out";
-$archive_file =~ s/\/\//\//g;
-$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { archive_file => $archive_file }});
-
-# It may not be secure, but we play it safe.
-$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, key => "log_0454", variables => {
-records => $anvil->Convert->add_commas({number => $count}),
-file => $archive_file,
-}});
-my ($failed) = $anvil->Storage->write_file({
-debug => $debug,
-body => $sql_file,
-file => $archive_file,
-user => "root",
-group => "root",
-mode => "0600",
-secure => 1.
-});
-$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { failed => $failed }});
-
-if ($failed)
-{
-$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 0, priority => "err", key => "error_0099", variables => {
-file => $archive_file,
-table => $table,
 }});
-last;
+my $modified_date = $anvil->Database->query({uuid => $uuid, query => $query, source => $THIS_FILE, line => __LINE__})->[0]->[0];
+$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { modified_date => $modified_date }});
+
+# Build the query.
+$query = "SELECT ";
+foreach my $column (sort {$a cmp $b} @{$columns})
+{
+$sql_file .= $column->[0].", ";
+$query .= $column->[0].", ";
+}
+$sql_file .= "modified_date) FROM stdin;\n";
+$query .= "modified_date FROM history.".$table." WHERE modified_date >= '".$modified_date."' ORDER BY modified_date ASC;";
+$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
+sql_file => $sql_file,
+query => $query,
+}});
+my $results = $anvil->Database->query({uuid => $uuid, query => $query, source => $THIS_FILE, line => __LINE__});
+my $count = @{$results};
+$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
+results => $results,
+count => $count,
+}});
+
+foreach my $row (@{$results})
+{
+# Build the string.
+my $line = "";
+my $i = 0;
+foreach my $column (@{$columns})
+{
+my $value = defined $row->[$i] ? $row->[$i] : '\N';
+$i++;
+$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
+"s1:i" => $i,
+"s2:column" => $column,
+"s3:value" => $value,
+}});
+
+# We need to convert tabs and newlines into \t and \n
+$value =~ s/\t/\\t/g;
+$value =~ s/\n/\\n/g;
+$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { value => $value }});
+
+$line .= $value."\t";
+}
+# Add the modified_date column.
+$line .= $row->[$i]."\n";
+$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { line => $line }});
+
+$sql_file .= $line;
+}
+$sql_file .= "\\.\n\n";;
+$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { sql_file => $sql_file }});
+
+my $archive_file = $directory."/".$anvil->Database->get_host_from_uuid({short => 1, host_uuid => $uuid}).".".$table.".".$time_stamp.".".$loop.".out";
+$archive_file =~ s/\/\//\//g;
+$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { archive_file => $archive_file }});
+
+# It may not be secure, but we play it safe.
+$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, key => "log_0454", variables => {
+records => $anvil->Convert->add_commas({number => $count}),
+file => $archive_file,
+}});
+my ($failed) = $anvil->Storage->write_file({
+debug => $debug,
+body => $sql_file,
+file => $archive_file,
+user => "root",
+group => "root",
+mode => "0600",
+secure => 1.
+});
+$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { failed => $failed }});
+
+if ($failed)
+{
+$do_delete = 0;
+$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 0, priority => "err", key => "error_0099", variables => {
+file => $archive_file,
+table => $table,
+}});
+last;
+}
 }
-else
+# Do Delete.
+if ($do_delete)
 {
 $anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, key => "log_0283"});
 $vacuum = 1;
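Each pass of this loop serialises one slice of the history table into PostgreSQL COPY text format: one tab-separated line per row, NULL columns written as \N, literal tabs and newlines escaped, and the block terminated with \. so the dump can later be replayed with COPY ... FROM stdin. A self-contained sketch of just that serialisation step, with made-up rows and column names standing in for the real Database->query results:

#!/usr/bin/perl
use strict;
use warnings;

# Made-up result set; one array reference per row, columns in a fixed order.
my @columns = ("host_name", "host_note");
my @rows    = (
	["an-a01n01", "first\tnode"],
	["an-a01n02", undef],
);

my $sql_file = "COPY history.hosts (".join(", ", @columns).", modified_date) FROM stdin;\n";
foreach my $row (@rows)
{
	my @values;
	foreach my $value (@{$row})
	{
		$value = '\N' if not defined $value;   # NULL columns are written as \N
		$value =~ s/\t/\\t/g;                  # escape literal tabs
		$value =~ s/\n/\\n/g;                  # and newlines
		push @values, $value;
	}
	push @values, "2023-01-01 00:00:00";       # stand-in for the row's modified_date
	$sql_file .= join("\t", @values)."\n";
}
$sql_file .= "\\.\n";                          # end-of-data marker
print $sql_file;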
@@ -15739,7 +15751,6 @@ COPY history.".$table." (";
 
 $offset -= $records_per_loop;
 $anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { offset => $offset }});
-
 }
 
 $anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { vacuum => $vacuum }});
@@ -16101,8 +16112,10 @@ ORDER BY
 
 # To avoid resyncs triggered by the differences that might occur if the row
 # count changed slightly between counting both/all DBs, we won't resync
-# until there's at least 10 rows different.
-if ($difference > 10)
+# until there's at least 10 rows different. The exception is the hosts file,
+# as it needs to resync on a single line difference when adding peer Striker
+# dashboards.
+if (($table eq "hosts") or ($difference > 10))
 {
 $anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, priority => "alert", key => "log_0219", variables => {
 missing => $difference,
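In short: the hosts table now resyncs on any row-count difference, while other tables still wait until the drift exceeds 10 rows, so newly added peer Striker dashboards get picked up without waiting for unrelated churn. A tiny standalone sketch of that decision (the early return for a zero difference is an assumption; in the module this branch is presumably only reached once the counts actually differ, and the table names below are just examples):

#!/usr/bin/perl
use strict;
use warnings;

# Mirrors the condition above: 'hosts' resyncs on any drift, other tables only past 10 rows.
sub wants_resync
{
	my ($table, $difference) = @_;
	return 0 if not $difference;
	return 1 if (($table eq "hosts") or ($difference > 10));
	return 0;
}

print wants_resync("hosts",    1), "\n";   # 1
print wants_resync("alerts",   4), "\n";   # 0
print wants_resync("alerts", 200), "\n";   # 1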
@@ -76,10 +76,10 @@ feature::scancore::disable::preventative-live-migration = 0
 # NOTE: If the archive directory doesn't exist, Anvil! will create it
 # automatically the first time it is needed.
 sys::database::archive::compress = 1
-sys::database::archive::count = 10000
+sys::database::archive::trigger = 50000
+sys::database::archive::count = 25000
+sys::database::archive::division = 30000
 sys::database::archive::directory = /usr/local/anvil/archives/
-sys::database::archive::division = 250000
-sys::database::archive::trigger = 20000
 
 # This puts a limit on how many queries (writes, generally) to make in a single batch transaction. This is
 # useful when doing very large transacions, like resync'ing a large table, by limiting how long a given
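The double-colon key names used in anvil.conf and in the Log->variables lists above (for example sys::database::archive::count) correspond to nested keys under $anvil->data, i.e. $anvil->data->{sys}{database}{archive}{count}. A rough sketch of how such a key can be walked into a nested hash; this is purely illustrative and not necessarily how Anvil's own config parser is implemented:

#!/usr/bin/perl
use strict;
use warnings;

my $data = {};

# Store an "a::b::c = value" style setting into a nested hash.
sub store_setting
{
	my ($data, $key, $value) = @_;
	my @parts = split /::/, $key;
	my $last  = pop @parts;
	my $ref   = $data;
	foreach my $part (@parts)
	{
		$ref->{$part} = {} if not defined $ref->{$part};
		$ref = $ref->{$part};
	}
	$ref->{$last} = $value;
	return;
}

store_setting($data, "sys::database::archive::count",   25000);
store_setting($data, "sys::database::archive::trigger", 50000);

print $data->{sys}{database}{archive}{count},   "\n";   # 25000
print $data->{sys}{database}{archive}{trigger}, "\n";   # 50000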
@@ -17,6 +17,9 @@
 # - Write a script that runs in crontab at UTC 17:00 that sends an email if Scancore or anvil-daemon are disabled.
 # - Examine limites in: https://www.freedesktop.org/software/systemd/man/systemd.exec.html#LimitCPU=
 # - Write a background program to scan the BCN and uses OUI data to try and find / auto-configure PDUs and UPSes
+# -
+# - Increase DRBD's default timeout
+# - Check for and enable persistent journald logging
 #
 # NOTE:
 # - For later; 'reboot --force --force' immediately kills the OS, like disabling ACPI on EL6 and hitting the
@@ -375,7 +375,7 @@ sub process_rename_file
 progress => 100,
 message => "job_0140,!!file_path!".$old_file_path."!!",
 });
-$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, key => "job_0140", variables => { old_file_path => $old_file_path }});
+$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, key => "job_0140", variables => { file_path => $old_file_path }});
 $anvil->nice_exit({exit_code => 0});
 }
 