* Increased the trigger point for database archiving. The old values were too low and were causing frequent archive -> resync cycles.

* Fixed a bug where the archive default of not storing copies on disk was not being honoured. Archiving now acts as described in anvil.conf.
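
For clarity, the fix makes the per-loop disk dump conditional on save_to_disk, and only skips the delete when a dump was requested but failed. A minimal sketch of that flow (simplified from the _archive_table() hunk below; write_archive_file() and delete_archived_records() are hypothetical stand-ins for the real dump and DELETE code, not functions in Database.pm):

    my $do_delete = 1;
    if ($anvil->data->{sys}{database}{archive}{save_to_disk})
    {
        # Archiving to disk is enabled; dump the rows slated for removal.
        my $failed = write_archive_file();   # hypothetical stand-in for the directory creation and dump in the diff below
        if ($failed)
        {
            # Don't delete rows that could not be dumped first.
            $do_delete = 0;
        }
    }
    if ($do_delete)
    {
        # Rows are only removed once (optionally) archived.
        delete_archived_records();           # hypothetical stand-in for the delete logic
    }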

Signed-off-by: Digimer <digimer@alteeve.ca>
main
Digimer 4 years ago
parent 48956d94fb
commit 41d528418d
  1. Anvil/Tools/Database.pm (235)
  2. anvil.conf (6)
  3. tools/anvil-sync-shared (2)

@@ -215,10 +215,11 @@ sub archive_database
}
# Make sure I have sane values.
$anvil->data->{sys}{database}{archive}{compress} = 1 if not defined $anvil->data->{sys}{database}{archive}{compress};
$anvil->data->{sys}{database}{archive}{count} = 10000 if not defined $anvil->data->{sys}{database}{archive}{count};
$anvil->data->{sys}{database}{archive}{division} = 25000 if not defined $anvil->data->{sys}{database}{archive}{division};
$anvil->data->{sys}{database}{archive}{trigger} = 20000 if not defined $anvil->data->{sys}{database}{archive}{trigger};
$anvil->data->{sys}{database}{archive}{compress} = 1 if not defined $anvil->data->{sys}{database}{archive}{compress};
$anvil->data->{sys}{database}{archive}{count} = 25000 if not defined $anvil->data->{sys}{database}{archive}{count};
$anvil->data->{sys}{database}{archive}{division} = 30000 if not defined $anvil->data->{sys}{database}{archive}{division};
$anvil->data->{sys}{database}{archive}{trigger} = 50000 if not defined $anvil->data->{sys}{database}{archive}{trigger};
$anvil->data->{sys}{database}{archive}{save_to_disk} = 0 if not defined $anvil->data->{sys}{database}{archive}{save_to_disk};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"sys::database::archive::compress" => $anvil->data->{sys}{database}{archive}{compress},
"sys::database::archive::count" => $anvil->data->{sys}{database}{archive}{count},
@@ -234,25 +235,6 @@ sub archive_database
"sys::database::archive::directory" => $anvil->data->{sys}{database}{archive}{directory},
}});
}
if (not -d $anvil->data->{sys}{database}{archive}{directory})
{
my $failed = $anvil->Storage->make_directory({
debug => $debug,
directory => $anvil->data->{sys}{database}{archive}{directory},
mode => "0700",
user => "root",
group => "root",
});
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { failed => $failed }});
if ($failed)
{
# No directory to archive into...
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, priority => "err", key => "error_0098", variables => {
directory => $anvil->data->{sys}{database}{archive}{directory},
}});
return("!!error!!");
}
}
# Make sure the numerical values are sane
if ($anvil->data->{sys}{database}{archive}{count} !~ /^\d+$/)
@@ -15607,13 +15589,39 @@ sub _archive_table
column_count => $column_count
}});
my $offset = $count - $records_per_loop;
my $loop = 0;
my $offset = $count - $records_per_loop;
my $loop = 0;
my $do_delete = 1;
for (1..$loops)
{
# We need to date stamp from the closest record to the offset.
$loop++;
my $sql_file = "
$loop++;
# Are we archiving to disk?
$do_delete = 1;
if ($anvil->data->{sys}{database}{archive}{save_to_disk})
{
if (not -d $anvil->data->{sys}{database}{archive}{directory})
{
my $failed = $anvil->Storage->make_directory({
debug => $debug,
directory => $anvil->data->{sys}{database}{archive}{directory},
mode => "0700",
user => "root",
group => "root",
});
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { failed => $failed }});
if ($failed)
{
# No directory to archive into...
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, priority => "err", key => "error_0098", variables => {
directory => $anvil->data->{sys}{database}{archive}{directory},
}});
return("!!error!!");
}
}
my $sql_file = "
-- Dump created at: [".$anvil->Get->date_and_time()."]
SET statement_timeout = 0;
SET lock_timeout = 0;
@@ -15626,96 +15634,100 @@ SET client_min_messages = warning;
SET row_security = off;
COPY history.".$table." (";
my $query = "SELECT modified_date FROM history.".$table." ORDER BY modified_date ASC OFFSET ".$offset." LIMIT 1;";
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"s1:loop" => $loop,
"s2:query" => $query,
"s3:sql_file" => $sql_file,
}});
my $modified_date = $anvil->Database->query({uuid => $uuid, query => $query, source => $THIS_FILE, line => __LINE__})->[0]->[0];
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { modified_date => $modified_date }});
# Build the query.
$query = "SELECT ";
foreach my $column (sort {$a cmp $b} @{$columns})
{
$sql_file .= $column->[0].", ";
$query .= $column->[0].", ";
}
$sql_file .= "modified_date) FROM stdin;\n";
$query .= "modified_date FROM history.".$table." WHERE modified_date >= '".$modified_date."' ORDER BY modified_date ASC;";
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
sql_file => $sql_file,
query => $query,
}});
my $results = $anvil->Database->query({uuid => $uuid, query => $query, source => $THIS_FILE, line => __LINE__});
my $count = @{$results};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
results => $results,
count => $count,
}});
foreach my $row (@{$results})
{
# Build the string.
my $line = "";
my $i = 0;
foreach my $column (@{$columns})
my $query = "SELECT modified_date FROM history.".$table." ORDER BY modified_date ASC OFFSET ".$offset." LIMIT 1;";
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"s1:loop" => $loop,
"s2:query" => $query,
"s3:sql_file" => $sql_file,
}});
my $modified_date = $anvil->Database->query({uuid => $uuid, query => $query, source => $THIS_FILE, line => __LINE__})->[0]->[0];
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { modified_date => $modified_date }});
# Build the query.
$query = "SELECT ";
foreach my $column (sort {$a cmp $b} @{$columns})
{
my $value = defined $row->[$i] ? $row->[$i] : '\N';
$i++;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"s1:i" => $i,
"s2:column" => $column,
"s3:value" => $value,
}});
# We need to convert tabs and newlines into \t and \n
$value =~ s/\t/\\t/g;
$value =~ s/\n/\\n/g;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { value => $value }});
$sql_file .= $column->[0].", ";
$query .= $column->[0].", ";
}
$sql_file .= "modified_date) FROM stdin;\n";
$query .= "modified_date FROM history.".$table." WHERE modified_date >= '".$modified_date."' ORDER BY modified_date ASC;";
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
sql_file => $sql_file,
query => $query,
}});
my $results = $anvil->Database->query({uuid => $uuid, query => $query, source => $THIS_FILE, line => __LINE__});
my $count = @{$results};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
results => $results,
count => $count,
}});
foreach my $row (@{$results})
{
# Build the string.
my $line = "";
my $i = 0;
foreach my $column (@{$columns})
{
my $value = defined $row->[$i] ? $row->[$i] : '\N';
$i++;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"s1:i" => $i,
"s2:column" => $column,
"s3:value" => $value,
}});
# We need to convert tabs and newlines into \t and \n
$value =~ s/\t/\\t/g;
$value =~ s/\n/\\n/g;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { value => $value }});
$line .= $value."\t";
}
# Add the modified_date column.
$line .= $row->[$i]."\n";
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { line => $line }});
$line .= $value."\t";
$sql_file .= $line;
}
# Add the modified_date column.
$line .= $row->[$i]."\n";
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { line => $line }});
$sql_file .= "\\.\n\n";;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { sql_file => $sql_file }});
$sql_file .= $line;
}
$sql_file .= "\\.\n\n";;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { sql_file => $sql_file }});
my $archive_file = $directory."/".$anvil->Database->get_host_from_uuid({short => 1, host_uuid => $uuid}).".".$table.".".$time_stamp.".".$loop.".out";
$archive_file =~ s/\/\//\//g;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { archive_file => $archive_file }});
# It may not be secure, but we play it safe.
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, key => "log_0454", variables => {
records => $anvil->Convert->add_commas({number => $count}),
file => $archive_file,
}});
my ($failed) = $anvil->Storage->write_file({
debug => $debug,
body => $sql_file,
file => $archive_file,
user => "root",
group => "root",
mode => "0600",
secure => 1,
});
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { failed => $failed }});
if ($failed)
{
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 0, priority => "err", key => "error_0099", variables => {
file => $archive_file,
table => $table,
my $archive_file = $directory."/".$anvil->Database->get_host_from_uuid({short => 1, host_uuid => $uuid}).".".$table.".".$time_stamp.".".$loop.".out";
$archive_file =~ s/\/\//\//g;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { archive_file => $archive_file }});
# It may not be secure, but we play it safe.
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, key => "log_0454", variables => {
records => $anvil->Convert->add_commas({number => $count}),
file => $archive_file,
}});
last;
my ($failed) = $anvil->Storage->write_file({
debug => $debug,
body => $sql_file,
file => $archive_file,
user => "root",
group => "root",
mode => "0600",
secure => 1,
});
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { failed => $failed }});
if ($failed)
{
$do_delete = 0;
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 0, priority => "err", key => "error_0099", variables => {
file => $archive_file,
table => $table,
}});
last;
}
}
else
# Do Delete.
if ($do_delete)
{
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, key => "log_0283"});
$vacuum = 1;
@@ -15739,7 +15751,6 @@ COPY history.".$table." (";
$offset -= $records_per_loop;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { offset => $offset }});
}
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { vacuum => $vacuum }});

@@ -76,10 +76,10 @@ feature::scancore::disable::preventative-live-migration = 0
# NOTE: If the archive directory doesn't exist, Anvil! will create it
# automatically the first time it is needed.
sys::database::archive::compress = 1
sys::database::archive::count = 10000
sys::database::archive::trigger = 50000
sys::database::archive::count = 25000
sys::database::archive::division = 30000
sys::database::archive::directory = /usr/local/anvil/archives/
sys::database::archive::division = 250000
sys::database::archive::trigger = 20000
# This puts a limit on how many queries (writes, generally) to make in a single batch transaction. This is
# useful when doing very large transacions, like resync'ing a large table, by limiting how long a given

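The anvil.conf hunk above does not show the new save_to_disk option itself. Assuming it follows the same key naming as the other archive settings (the Perl defaults above read sys::database::archive::save_to_disk), an archive section with disk archiving enabled would presumably look something like:

    # Assumed example only; the save_to_disk key name is inferred from the Database.pm defaults in this commit.
    sys::database::archive::compress     = 1
    sys::database::archive::count        = 25000
    sys::database::archive::division     = 30000
    sys::database::archive::trigger      = 50000
    sys::database::archive::directory    = /usr/local/anvil/archives/
    sys::database::archive::save_to_disk = 1

With save_to_disk left at its default of 0, the _archive_table() change above skips the dump entirely and, as long as nothing failed, still removes the old records.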
@@ -375,7 +375,7 @@ sub process_rename_file
progress => 100,
message => "job_0140,!!file_path!".$old_file_path."!!",
});
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, key => "job_0140", variables => { old_file_path => $old_file_path }});
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, key => "job_0140", variables => { file_path => $old_file_path }});
$anvil->nice_exit({exit_code => 0});
}
