@@ -9,7 +9,6 @@ use DBI;
use Scalar::Util qw(weaken isweak);
use Data::Dumper;
use Time::HiRes qw(gettimeofday tv_interval);
use SQL::Parser;
our $VERSION = "3.0.0";
my $THIS_FILE = "Database.pm";
@@ -619,7 +618,7 @@ This is the SQL schema file that will be used to initialize the database, if the
=head3 tables (optional)
This is an optional array reference of tables to specifically check when connecting to databases. Each entry is treated as a table name, and that table's most recent C<< change_date >> time stamp will be read. If a column name in the table ends in C<< _host_uuid >>, then the check and resync will be restricted to entries in that column matching the current host's C<< sys::host_uuid >>. If the table does not have a corresponding table in the C<< history >> schema, then only the public table will be synced.
This is an optional array reference of tables to specifically check when connecting to databases. Each entry is treated as a table name, and that table's most recent C<< modified_date >> time stamp will be read. If a column name in the table ends in C<< _host_uuid >>, then the check and resync will be restricted to entries in that column matching the current host's C<< sys::host_uuid >>. If the table does not have a corresponding table in the C<< history >> schema, then only the public table will be synced.
Note: The array order allows you to ensure that tables with primary keys are synchronized before tables with foreign keys. As such, please be aware of the order in which the table names are put into the array reference.
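For example, a caller might connect and name the tables it cares about, in dependency order. This is a minimal sketch; the specific table names shown are illustrative only:

 my $connections = $anvil->Database->connect({
     tables => ["hosts", "jobs", "variables"],
 });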
@@ -1059,7 +1058,7 @@ sub connect
set_by => $THIS_FILE,
record_locator => $uuid,
name => "connect_to_db",
change_date => $anvil->data->{sys}{database}{timestamp},
modified_date => $anvil->data->{sys}{database}{timestamp},
});
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { set => $set }});
@@ -1120,7 +1119,7 @@ sub connect
set_by => $THIS_FILE,
record_locator => $uuid,
name => "connect_to_db",
change_date => $anvil->data->{sys}{database}{timestamp},
modified_date => $anvil->data->{sys}{database}{timestamp},
});
if ($cleared)
{
@@ -1236,7 +1235,7 @@ Each anonymous hash is structured as:
host_uuid => $host_uuid,
host_name => $host_name,
host_type => $host_type,
change_date => $change_date,
modified_date => $modified_date,
It also sets the variables C<< sys::hosts::by_uuid::<host_uuid> = <host_name> >> and C<< sys::hosts::by_name::<host_name> = <host_uuid> >> per host read, for quick reference.
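A hedged usage sketch of walking the returned array reference (this assumes it documents the C<< get_hosts >> method; the loop body is illustrative only):

 my $hosts = $anvil->Database->get_hosts();
 foreach my $host (@{$hosts})
 {
     # Each anonymous hash carries the keys shown above.
     print "host: [".$host->{host_name}."] (".$host->{host_type}."), uuid: [".$host->{host_uuid}."]\n";

     # The convenience hash maps a UUID back to its host name.
     my $host_name = $anvil->data->{sys}{hosts}{by_uuid}{$host->{host_uuid}};
 }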
@@ -1254,7 +1253,7 @@ SELECT
host_uuid,
host_name,
host_type,
change_date
modified_date
FROM
hosts
;";
@@ -1272,18 +1271,18 @@ FROM
my $host_uuid = $row->[0];
my $host_name = $row->[1];
my $host_type = $row->[2];
my $change_date = $row->[3];
my $modified_date = $row->[3];
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
host_uuid => $host_uuid,
host_name => $host_name,
host_type => $host_type,
change_date => $change_date,
modified_date => $modified_date,
}});
push @{$return}, {
host_uuid => $host_uuid,
host_name => $host_name,
host_type => $host_type,
change_date => $change_date,
modified_date => $modified_date,
};
# Record the host_uuid in a hash so that the name can be easily retrieved.
@@ -1339,7 +1338,7 @@ SELECT
job_title,
job_description,
job_status,
change_date
modified_date
FROM
jobs
WHERE
@@ -1366,7 +1365,7 @@ WHERE
my $job_title = $row->[8];
my $job_description = $row->[9];
my $job_status = $row->[10];
my $change_date = $row->[11];
my $modified_date = $row->[11];
my $started_seconds_ago = $job_picked_up_at ? (time - $job_picked_up_at) : 0;
my $updated_seconds_ago = $job_updated ? (time - $job_updated) : 0;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
@@ -1381,7 +1380,7 @@ WHERE
job_title => $job_title,
job_description => $job_description,
job_status => $job_status,
change_date => $change_date,
modified_date => $modified_date,
started_seconds_ago => $started_seconds_ago,
updated_seconds_ago => $updated_seconds_ago,
}});
@@ -1405,7 +1404,7 @@ WHERE
job_title => $job_title,
job_description => $job_description,
job_status => $job_status,
change_date => $change_date,
modified_date => $modified_date,
};
}
@@ -1724,7 +1723,7 @@ INSERT INTO
bridge_name,
bridge_id,
bridge_stp_enabled,
change_date
modified_date
) VALUES (
".$anvil->data->{sys}{database}{use_handle}->quote($bridge_uuid)." ,
".$anvil->data->{sys}{database}{use_handle}->quote($bridge_host_uuid)." ,
@ -1793,7 +1792,7 @@ SET
bridge_name = ".$anvil->data->{sys}{database}{use_handle}->quote($bridge_name)." ,
bridge_id = ".$anvil->data->{sys}{database}{use_handle}->quote($bridge_id)." ,
bridge_stp_enabled = ".$anvil->data->{sys}{database}{use_handle}->quote($bridge_stp_enabled)." ,
change _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
modified _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
WHERE
bridge_uuid = ".$anvil->data->{sys}{database}{use_handle}->quote($bridge_uuid)."
" ;
@@ -2008,7 +2007,7 @@ INSERT INTO
bond_down_delay,
bond_mac_address,
bond_operational,
change_date
modified_date
) VALUES (
".$anvil->data->{sys}{database}{use_handle}->quote($bond_uuid)." ,
".$anvil->data->{sys}{database}{use_handle}->quote($bond_host_uuid)." ,
@ -2125,7 +2124,7 @@ SET
bond_down_delay = ".$anvil->data->{sys}{database}{use_handle}->quote($bond_down_delay)." ,
bond_mac_address = ".$anvil->data->{sys}{database}{use_handle}->quote($bond_mac_address)." ,
bond_operational = ".$anvil->data->{sys}{database}{use_handle}->quote($bond_operational)." ,
change _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
modified _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
WHERE
bond_uuid = ".$anvil->data->{sys}{database}{use_handle}->quote($bond_uuid)."
" ;
@@ -2246,7 +2245,7 @@ INSERT INTO
host_uuid,
host_name,
host_type,
change_date
modified_date
) VALUES (
".$anvil->data->{sys}{database}{use_handle}->quote($host_uuid)." ,
".$anvil->data->{sys}{database}{use_handle}->quote($host_name)." ,
@ -2266,7 +2265,7 @@ UPDATE
SET
host_name = ".$anvil->data->{sys}{database}{use_handle}->quote($host_name)." ,
host_type = ".$anvil->data->{sys}{database}{use_handle}->quote($host_type)." ,
change _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
modified _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
WHERE
host_uuid = ".$anvil->data->{sys}{database}{use_handle}->quote($host_uuid)."
; " ;
@@ -2466,7 +2465,7 @@ INSERT INTO
ip_address_gateway,
ip_address_default_gateway,
ip_address_dns,
change_date
modified_date
) VALUES (
".$anvil->data->{sys}{database}{use_handle}->quote($ip_address_uuid)." ,
".$anvil->data->{sys}{database}{use_handle}->quote($ip_address_host_uuid)." ,
@ -2559,7 +2558,7 @@ SET
ip_address_gateway = ".$anvil->data->{sys}{database}{use_handle}->quote($ip_address_gateway)." ,
ip_address_default_gateway = ".$anvil->data->{sys}{database}{use_handle}->quote($ip_address_default_gateway)." ,
ip_address_dns = ".$anvil->data->{sys}{database}{use_handle}->quote($ip_address_dns)." ,
change _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
modified _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
WHERE
ip_address_uuid = ".$anvil->data->{sys}{database}{use_handle}->quote($ip_address_uuid)."
" ;
@@ -2832,7 +2831,7 @@ INSERT INTO
job_title,
job_description,
job_status,
change_date
modified_date
) VALUES (
".$anvil->data->{sys}{database}{use_handle}->quote($job_uuid)." ,
".$anvil->data->{sys}{database}{use_handle}->quote($job_host_uuid)." ,
@ -2925,7 +2924,7 @@ UPDATE
jobs
SET
job_progress = ".$anvil->data->{sys}{database}{use_handle}->quote($job_progress)." ,
change _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
modified _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
WHERE
job_uuid = ".$anvil->data->{sys}{database}{use_handle}->quote($job_uuid)."
" ;
@ -2937,7 +2936,7 @@ UPDATE
SET
job_progress = ".$anvil->data->{sys}{database}{use_handle}->quote($job_progress)." ,
job_status = ".$anvil->data->{sys}{database}{use_handle}->quote($job_status)." ,
change _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
modified _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
WHERE
job_uuid = ".$anvil->data->{sys}{database}{use_handle}->quote($job_uuid)."
" ;
@@ -2976,7 +2975,7 @@ SET
job_title = ".$anvil->data->{sys}{database}{use_handle}->quote($job_title).",
job_description = ".$anvil->data->{sys}{database}{use_handle}->quote($job_description).",
job_status = ".$anvil->data->{sys}{database}{use_handle}->quote($job_status).",
change_date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
modified_date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
WHERE
job_uuid = ".$anvil->data->{sys}{database}{use_handle}->quote($job_uuid)."
";
@@ -3238,7 +3237,7 @@ SET
network_interface_medium = ".$anvil->data->{sys}{database}{use_handle}->quote($network_interface_medium).",
network_interface_mtu = ".$anvil->data->{sys}{database}{use_handle}->quote($network_interface_mtu).",
network_interface_speed = ".$anvil->data->{sys}{database}{use_handle}->quote($network_interface_speed).",
change_date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
modified_date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
WHERE
network_interface_uuid = ".$anvil->data->{sys}{database}{use_handle}->quote($network_interface_uuid)."
;";
@@ -3270,7 +3269,7 @@ INSERT INTO
network_interface_medium,
network_interface_mtu,
network_interface_speed,
change_date
modified_date
) VALUES (
".$anvil->data->{sys}{database}{use_handle}->quote($network_interface_uuid)." ,
".$anvil->data->{sys}{database}{use_handle}->quote($network_interface_bond_uuid)." ,
@ -3445,7 +3444,7 @@ SET
session_user_uuid = ".$anvil->data->{sys}{database}{use_handle}->quote($session_user_uuid)." ,
session_salt = ".$anvil->data->{sys}{database}{use_handle}->quote($session_salt)." ,
session_user_agent = ".$anvil->data->{sys}{database}{use_handle}->quote($session_user_agent)." ,
change _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
modified _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
WHERE
session_uuid = ".$anvil->data->{sys}{database}{use_handle}->quote($session_uuid)."
" ;
@@ -3468,7 +3467,7 @@ INSERT INTO
session_user_uuid,
session_salt,
session_user_agent,
change_date
modified_date
) VALUES (
".$anvil->data->{sys}{database}{use_handle}->quote($session_uuid)." ,
".$anvil->data->{sys}{database}{use_handle}->quote($session_host_uuid)." ,
@ -3625,7 +3624,7 @@ INSERT INTO
state_name ,
state_host_uuid ,
state_note ,
change _date
modified _date
) VALUES (
".$anvil->data->{sys}{database}{use_handle}->quote($state_uuid)." ,
".$anvil->data->{sys}{database}{use_handle}->quote($state_name)." ,
@ -3688,7 +3687,7 @@ SET
state_name = ".$anvil->data->{sys}{database}{use_handle}->quote($state_name)." ,
state_host_uuid = ".$anvil->data->{sys}{database}{use_handle}->quote($state_host_uuid)." ,
state_note = ".$anvil->data->{sys}{database}{use_handle}->quote($state_note)." ,
change _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
modified _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
WHERE
state_uuid = ".$anvil->data->{sys}{database}{use_handle}->quote($state_uuid)."
" ;
@@ -3919,7 +3918,7 @@ INSERT INTO
user_is_admin,
user_is_experienced,
user_is_trusted,
change_date
modified_date
) VALUES (
".$anvil->data->{sys}{database}{use_handle}->quote($user_uuid)." ,
".$anvil->data->{sys}{database}{use_handle}->quote($user_name)." ,
@ -4019,7 +4018,7 @@ SET
user_is_admin = ".$anvil->data->{sys}{database}{use_handle}->quote($user_is_admin)." ,
user_is_experienced = ".$anvil->data->{sys}{database}{use_handle}->quote($user_is_experienced)." ,
user_is_trusted = ".$anvil->data->{sys}{database}{use_handle}->quote($user_is_trusted)." ,
change _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
modified _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
WHERE
user_uuid = ".$anvil->data->{sys}{database}{use_handle}->quote($user_uuid)."
" ;
@@ -4209,7 +4208,7 @@ INSERT INTO
variable_section,
variable_source_uuid,
variable_source_table,
change_date
modified_date
) VALUES (
".$anvil->data->{sys}{database}{use_handle}->quote($variable_uuid)." ,
".$anvil->data->{sys}{database}{use_handle}->quote($variable_name)." ,
@ -4280,7 +4279,7 @@ UPDATE
variables
SET
variable_value = ".$anvil->data->{sys}{database}{use_handle}->quote($variable_value)." ,
change _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
modified _date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
WHERE
variable_uuid = " . $ anvil - > data - > { sys } { database } { use_handle } - > quote ( $ variable_uuid ) ;
if ( ( $ variable_source_uuid ne "" ) && ( $ variable_source_table ne "" ) )
@@ -4360,7 +4359,7 @@ SET
variable_default = ".$anvil->data->{sys}{database}{use_handle}->quote($variable_default).",
variable_description = ".$anvil->data->{sys}{database}{use_handle}->quote($variable_description).",
variable_section = ".$anvil->data->{sys}{database}{use_handle}->quote($variable_section).",
change_date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
modified_date = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{database}{timestamp})."
WHERE
variable_uuid = ".$anvil->data->{sys}{database}{use_handle}->quote($variable_uuid)."
";
@@ -4490,11 +4489,11 @@ sub locking
# If I have been asked to check, we will return the variable_uuid if a lock is set.
if ($check)
{
my ($lock_value, $variable_uuid, $change_date) = $anvil->Database->read_variable({variable_name => $variable_name});
my ($lock_value, $variable_uuid, $modified_date) = $anvil->Database->read_variable({variable_name => $variable_name});
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
lock_value => $lock_value,
variable_uuid => $variable_uuid,
change_date => $change_date,
modified_date => $modified_date,
}});
return($lock_value);
@@ -4505,11 +4504,11 @@ sub locking
{
# We check to see if there is a lock before we clear it. This way we don't log that we
# released a lock unless we really released a lock.
my ($lock_value, $variable_uuid, $change_date) = $anvil->Database->read_variable({variable_name => $variable_name});
my ($lock_value, $variable_uuid, $modified_date) = $anvil->Database->read_variable({variable_name => $variable_name});
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
lock_value => $lock_value,
variable_uuid => $variable_uuid,
change_date => $change_date,
modified_date => $modified_date,
}});
if ($lock_value)
@@ -4571,12 +4570,12 @@ sub locking
$waiting = 0;
# See if we had a lock.
my ($lock_value, $variable_uuid, $change_date) = $anvil->Database->read_variable({variable_name => $variable_name});
my ($lock_value, $variable_uuid, $modified_date) = $anvil->Database->read_variable({variable_name => $variable_name});
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
waiting => $waiting,
lock_value => $lock_value,
variable_uuid => $variable_uuid,
change_date => $change_date,
modified_date => $modified_date,
}});
if ($lock_value =~ /^(.*?)::(.*?)::(\d+)/)
{
@@ -4893,7 +4892,7 @@ sub read_variable
SELECT
variable_value,
variable_uuid,
round(extract(epoch from change_date)) AS mtime
round(extract(epoch from modified_date)) AS mtime
FROM
variables
WHERE ";
@@ -4920,7 +4919,7 @@ AND
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, key => "log_0124", variables => { query => $query }});
my $variable_value = "";
my $change_date = "";
my $modified_date = "";
my $results = $anvil->Database->query({uuid => $uuid, query => $query, source => $THIS_FILE, line => __LINE__});
my $count = @{$results};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
@@ -4931,20 +4930,20 @@ AND
{
$variable_value = $row->[0];
$variable_uuid = $row->[1];
$change_date = $row->[2];
$modified_date = $row->[2];
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
variable_value => $variable_value,
variable_uuid => $variable_uuid,
change_date => $change_date,
modified_date => $modified_date,
}});
}
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
variable_value => $variable_value,
variable_uuid => $variable_uuid,
change_date => $change_date,
modified_date => $modified_date,
}});
return($variable_value, $variable_uuid, $change_date);
return($variable_value, $variable_uuid, $modified_date);
}
=head2 resync_databases
@@ -4967,9 +4966,6 @@ sub resync_databases
return(0);
}
### TODO: Multiple updates to a column with the same change_date don't resync properly. Use
### history_id? It has to become a UUID, as history_id will conflict across DBs given it simply
### increments per-DB currently
### NOTE: Don't sort this array, we need to resync in the order that the user passed the tables to us
### to avoid trouble with primary/foreign keys.
# We're going to use the array of tables assembled by _find_behind_databases() stored in
@@ -5054,21 +5050,15 @@ sub resync_databases
$anvil->data->{db_resync}{$uuid}{public}{sql} = [];
$anvil->data->{db_resync}{$uuid}{history}{sql} = [];
# Read in the data, change_date first as we'll need that for all entries we record.
# Read in the data, modified_date first as we'll need that for all entries we record.
my $query = "SELECT modified_date AT time zone 'UTC', $uuid_column, ";
my $read_columns = [];
my $query = "SELECT change_date AT time zone 'UTC', $uuid_column, ";
push @{$read_columns}, "change_date";
if ($schema eq "history")
{
$query = "SELECT change_date AT time zone 'UTC', history_id, $uuid_column, ";
push @{$read_columns}, "history_id";
}
push @{$read_columns}, "modified_date";
push @{$read_columns}, $uuid_column;
foreach my $column_name (sort {$a cmp $b} keys %{$anvil->data->{sys}{database}{table}{$table}{column}})
{
# We'll skip the host column as we'll use it in the conditional.
next if $column_name eq "change_date";
next if $column_name eq "history_id";
next if $column_name eq "modified_date";
next if $column_name eq $host_column;
next if $column_name eq $uuid_column;
$query .= $column_name.", ";
@@ -5085,11 +5075,7 @@ sub resync_databases
{
$query .= " WHERE ".$host_column." = ".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{host_uuid});
}
$query .= " ORDER BY change_date DESC;";
if ($schema eq "history")
{
$query .= " ORDER BY change_date DESC, history_id ASC;";
}
$query .= " ORDER BY modified_date DESC;";
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, key => "log_0074", variables => { uuid => $uuid, query => $query }});
my $results = $anvil->Database->query({uuid => $uuid, query => $query, source => $THIS_FILE, line => __LINE__});
@@ -5104,7 +5090,7 @@ sub resync_databases
foreach my $row (@{$results})
{
$row_number++;
my $change_date = "";
my $modified_date = "";
my $row_uuid = "";
for (my $column_number = 0; $column_number < @{$read_columns}; $column_number++)
{
@@ -5112,7 +5098,7 @@ sub resync_databases
my $column_value = defined $row->[$column_number] ? $row->[$column_number] : "NULL";
my $not_null = $anvil->data->{sys}{database}{table}{$table}{column}{$column_name}{not_null};
my $data_type = $anvil->data->{sys}{database}{table}{$table}{column}{$column_name}{data_type};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
$anvil->Log->variables({source => 2, line => __LINE__, level => $debug, list => {
"s1:id" => $uuid,
"s2:row_number" => $row_number,
"s3:column_number" => $column_number,
@@ -5127,11 +5113,11 @@ sub resync_databases
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { column_value => $column_value }});
}
# The change_date should be the first row.
if ($column_name eq "change_date")
# The modified_date should be the first row.
if ($column_name eq "modified_date")
{
$change_date = $column_value;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { change_date => $change_date }});
$modified_date = $column_value;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { modified_date => $modified_date }});
next;
}
@@ -5153,26 +5139,26 @@ sub resync_databases
}
# TODO: Remove these or make them proper errors
die $THIS_FILE." ".__LINE__."; This row's change_date wasn't the first column returned in query: [$query]\n" if not $change_date;
die $THIS_FILE." ".__LINE__."; This row's modified_date wasn't the first column returned in query: [$query]\n" if not $modified_date;
die $THIS_FILE." ".__LINE__."; This row's UUID column: [$uuid_column] wasn't the second column returned in query: [$query]\n" if not $row_uuid;
# Record this in the unified and db hashes.
$anvil->data->{db_data}{unified}{$table}{change_date}{$change_date}{$uuid_column}{$row_uuid}{$column_name} = $column_value;
$anvil->data->{db_data}{$uuid}{$table}{change_date}{$change_date}{$uuid_column}{$row_uuid}{$column_name} = $column_value;
# Record this in the unified and local hashes. This table isn't restricted to given hosts.
$anvil->data->{db_data}{unified}{$table}{modified_date}{$modified_date}{$uuid_column}{$row_uuid}{$column_name} = $column_value;
$anvil->data->{db_data}{$uuid}{$table}{modified_date}{$modified_date}{$uuid_column}{$row_uuid}{$column_name} = $column_value;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"db_data::unified::${table}::change_date::${change_date}::${uuid_column}::${row_uuid}::${column_name}" => $anvil->data->{db_data}{unified}{$table}{change_date}{$change_date}{$uuid_column}{$row_uuid}{$column_name},
"db_data::${uuid}::${table}::change_date::${change_date}::${uuid_column}::${row_uuid}::${column_name}" => $anvil->data->{db_data}{$uuid}{$table}{change_date}{$change_date}{$uuid_column}{$row_uuid}{$column_name},
"db_data::unified::${table}::modified_date::${modified_date}::${uuid_column}::${row_uuid}::${column_name}" => $anvil->data->{db_data}{unified}{$table}{modified_date}{$modified_date}{$uuid_column}{$row_uuid}{$column_name},
"db_data::${uuid}::${table}::modified_date::${modified_date}::${uuid_column}::${row_uuid}::${column_name}" => $anvil->data->{db_data}{$uuid}{$table}{modified_date}{$modified_date}{$uuid_column}{$row_uuid}{$column_name},
}});
}
}
}
# Now all the data is read in, we can see what might be missing from each DB.
foreach my $change_date (sort {$b cmp $a} keys %{$anvil->data->{db_data}{unified}{$table}{change_date}})
foreach my $modified_date (sort {$b cmp $a} keys %{$anvil->data->{db_data}{unified}{$table}{modified_date}})
{
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { change_date => $change_date }});
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { modified_date => $modified_date }});
foreach my $row_uuid (sort {$a cmp $b} keys %{$anvil->data->{db_data}{unified}{$table}{change_date}{$change_date}{$uuid_column}})
foreach my $row_uuid (sort {$a cmp $b} keys %{$anvil->data->{db_data}{unified}{$table}{modified_date}{$modified_date}{$uuid_column}})
{
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { row_uuid => $row_uuid }});
@@ -5207,15 +5193,15 @@ sub resync_databases
{
# It exists, but does it exist at this time stamp?
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"db_data::${uuid}::${table}::change_date::${change_date}::${uuid_column}::${row_uuid}" => $anvil->data->{db_data}{$uuid}{$table}{change_date}{$change_date}{$uuid_column}{$row_uuid},
"db_data::${uuid}::${table}::modified_date::${modified_date}::${uuid_column}::${row_uuid}" => $anvil->data->{db_data}{$uuid}{$table}{modified_date}{$modified_date}{$uuid_column}{$row_uuid},
}});
if (not $anvil->data->{db_data}{$uuid}{$table}{change_date}{$change_date}{$uuid_column}{$row_uuid})
if (not $anvil->data->{db_data}{$uuid}{$table}{modified_date}{$modified_date}{$uuid_column}{$row_uuid})
{
# No, so UPDATE it. We'll build the query now...
my $query = "UPDATE public.$table SET ";
foreach my $column_name (sort {$a cmp $b} keys %{$anvil->data->{db_data}{unified}{$table}{change_date}{$change_date}{$uuid_column}{$row_uuid}})
foreach my $column_name (sort {$a cmp $b} keys %{$anvil->data->{db_data}{unified}{$table}{modified_date}{$modified_date}{$uuid_column}{$row_uuid}})
{
my $column_value = $anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{db_data}{unified}{$table}{change_date}{$change_date}{$uuid_column}{$row_uuid}{$column_name});
my $column_value = $anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{db_data}{unified}{$table}{modified_date}{$modified_date}{$uuid_column}{$row_uuid}{$column_name});
$column_value = "NULL" if not defined $column_value;
$column_value =~ s/'NULL'/NULL/g;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
@@ -5225,7 +5211,7 @@ sub resync_databases
$query .= "$column_name = ".$column_value.", ";
}
$query .= "change_date = ".$anvil->data->{sys}{database}{use_handle}->quote($change_date)."::timestamp AT TIME ZONE 'UTC' WHERE $uuid_column = ".$anvil->data->{sys}{database}{use_handle}->quote($row_uuid).";";
$query .= "modified_date = ".$anvil->data->{sys}{database}{use_handle}->quote($modified_date)."::timestamp AT TIME ZONE 'UTC' WHERE $uuid_column = ".$anvil->data->{sys}{database}{use_handle}->quote($row_uuid).";";
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, key => "log_0074", variables => { uuid => $uuid, query => $query }});
# Now record the query in the array
@@ -5240,9 +5226,9 @@ sub resync_databases
# they're in the same order.
my $columns = "";
my $values = "";
foreach my $column_name (sort {$a cmp $b} keys %{$anvil->data->{db_data}{unified}{$table}{change_date}{$change_date}{$uuid_column}{$row_uuid}})
foreach my $column_name (sort {$a cmp $b} keys %{$anvil->data->{db_data}{unified}{$table}{modified_date}{$modified_date}{$uuid_column}{$row_uuid}})
{
my $column_value = $anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{db_data}{unified}{$table}{change_date}{$change_date}{$uuid_column}{$row_uuid}{$column_name});
my $column_value = $anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{db_data}{unified}{$table}{modified_date}{$modified_date}{$uuid_column}{$row_uuid}{$column_name});
$column_value =~ s/'NULL'/NULL/g;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
column_name => $column_name,
@@ -5256,11 +5242,11 @@ sub resync_databases
'values' => $values,
}});
my $query = "INSERT INTO public.$table (".$uuid_column.", ".$columns."change_date) VALUES (".$anvil->data->{sys}{database}{use_handle}->quote($row_uuid).", ".$values.$anvil->data->{sys}{database}{use_handle}->quote($change_date)."::timestamp AT TIME ZONE 'UTC');";
my $query = "INSERT INTO public.$table (".$uuid_column.", ".$columns."modified_date) VALUES (".$anvil->data->{sys}{database}{use_handle}->quote($row_uuid).", ".$values.$anvil->data->{sys}{database}{use_handle}->quote($modified_date)."::timestamp AT TIME ZONE 'UTC');";
if ($host_column)
{
# Add the host column.
$query = "INSERT INTO public.$table ($host_column, $uuid_column, ".$columns."change_date) VALUES (".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{host_uuid}).", ".$anvil->data->{sys}{database}{use_handle}->quote($row_uuid).", ".$values.$anvil->data->{sys}{database}{use_handle}->quote($change_date)."::timestamp AT TIME ZONE 'UTC');";
$query = "INSERT INTO public.$table ($host_column, $uuid_column, ".$columns."modified_date) VALUES (".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{host_uuid}).", ".$anvil->data->{sys}{database}{use_handle}->quote($row_uuid).", ".$values.$anvil->data->{sys}{database}{use_handle}->quote($modified_date)."::timestamp AT TIME ZONE 'UTC');";
}
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, key => "log_0074", variables => { uuid => $uuid, query => $query }});
@@ -5278,9 +5264,9 @@ sub resync_databases
# question of whether the entry for the current
# timestamp exists in the history schema.
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"db_data::${uuid}::${table}::change_date::${change_date}::${uuid_column}::${row_uuid}" => $anvil->data->{db_data}{$uuid}{$table}{change_date}{$change_date}{$uuid_column}{$row_uuid},
"db_data::${uuid}::${table}::modified_date::${modified_date}::${uuid_column}::${row_uuid}" => $anvil->data->{db_data}{$uuid}{$table}{modified_date}{$modified_date}{$uuid_column}{$row_uuid},
}});
if (not $anvil->data->{db_data}{$uuid}{$table}{change_date}{$change_date}{$uuid_column}{$row_uuid})
if (not $anvil->data->{db_data}{$uuid}{$table}{modified_date}{$modified_date}{$uuid_column}{$row_uuid})
{
# It hasn't been seen, so INSERT it. We need
# to build entries for the column names and
@@ -5288,9 +5274,9 @@ sub resync_databases
# they're in the same order.
my $columns = "";
my $values = "";
foreach my $column_name (sort {$a cmp $b} keys %{$anvil->data->{db_data}{unified}{$table}{change_date}{$change_date}{$uuid_column}{$row_uuid}})
foreach my $column_name (sort {$a cmp $b} keys %{$anvil->data->{db_data}{unified}{$table}{modified_date}{$modified_date}{$uuid_column}{$row_uuid}})
{
my $column_value = $anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{db_data}{unified}{$table}{change_date}{$change_date}{$uuid_column}{$row_uuid}{$column_name});
my $column_value = $anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{db_data}{unified}{$table}{modified_date}{$modified_date}{$uuid_column}{$row_uuid}{$column_name});
$column_value =~ s/'NULL'/NULL/g;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
column_name => $column_name,
@@ -5304,11 +5290,11 @@ sub resync_databases
'values' => $values,
}});
my $query = "INSERT INTO history.$table (".$uuid_column.", ".$columns."change_date) VALUES (".$anvil->data->{sys}{database}{use_handle}->quote($row_uuid).", ".$values.$anvil->data->{sys}{database}{use_handle}->quote($change_date)."::timestamp AT TIME ZONE 'UTC');";
my $query = "INSERT INTO history.$table (".$uuid_column.", ".$columns."modified_date) VALUES (".$anvil->data->{sys}{database}{use_handle}->quote($row_uuid).", ".$values.$anvil->data->{sys}{database}{use_handle}->quote($modified_date)."::timestamp AT TIME ZONE 'UTC');";
if ($host_column)
{
# Add the host column.
$query = "INSERT INTO history.$table ($host_column, $uuid_column, ".$columns."change_date) VALUES (".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{host_uuid}).", ".$anvil->data->{sys}{database}{use_handle}->quote($row_uuid).", ".$values.$anvil->data->{sys}{database}{use_handle}->quote($change_date)."::timestamp AT TIME ZONE 'UTC');";
$query = "INSERT INTO history.$table ($host_column, $uuid_column, ".$columns."modified_date) VALUES (".$anvil->data->{sys}{database}{use_handle}->quote($anvil->data->{sys}{host_uuid}).", ".$anvil->data->{sys}{database}{use_handle}->quote($row_uuid).", ".$values.$anvil->data->{sys}{database}{use_handle}->quote($modified_date)."::timestamp AT TIME ZONE 'UTC');";
}
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, key => "log_0074", variables => { uuid => $uuid, query => $query }});
@@ -5318,7 +5304,7 @@ sub resync_databases
} # if seen
} # foreach $id
} # foreach $row_uuid
} # foreach $change_date ...
} # foreach $modified_date ...
# Free up memory by deleting the DB data from the main hash.
delete $anvil->data->{db_data};
@@ -5462,7 +5448,6 @@ sub write
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { i => $i, 'next' => $next }});
foreach my $this_query (@{$query})
{
$anvil->Database->_split_query({query => $query});
push @{$query_set}, $this_query;
$i++;
@@ -5518,9 +5503,11 @@ sub write
uuid => $uuid,
count => $count,
}});
# More than one query, so start a transaction block.
$anvil->data->{cache}{database_handle}{$uuid}->begin_work;
if ($count)
{
# More than one query, so start a transaction block.
$anvil->data->{cache}{database_handle}{$uuid}->begin_work;
}
foreach my $query (@{$query_set})
{
@@ -5546,8 +5533,12 @@ sub write
}});
}
# Commit the changes.
$anvil->data->{cache}{database_handle}{$uuid}->commit();
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { count => $count }});
if ($count)
{
# Commit the changes.
$anvil->data->{cache}{database_handle}{$uuid}->commit();
}
}
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { count => $count }});
@@ -5649,7 +5640,7 @@ sub _archive_table
# There is enough data to trigger an archive, so let's get started with a list of columns in this
# table.
$query = "SELECT column_name FROM information_schema.columns WHERE table_schema = 'history' AND table_name = ".$anvil->data->{sys}{database}{use_handle}->quote($table)." AND column_name != 'history_id' AND column_name != 'change_date';";
$query = "SELECT column_name FROM information_schema.columns WHERE table_schema = 'history' AND table_name = ".$anvil->data->{sys}{database}{use_handle}->quote($table)." AND column_name != 'history_id' AND column_name != 'modified_date';";
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, key => "log_0124", variables => { query => $query }});
my $columns = $anvil->Database->query({query => $query, source => $THIS_FILE, line => __LINE__});
@@ -5713,7 +5704,7 @@ sub _find_behind_databases
}
# Now, look through the core tables, plus any tables the user might have passed, for differing
# 'change_date' entries, or no entries in one DB with entries in the other (as can happen with a
# 'modified_date' entries, or no entries in one DB with entries in the other (as can happen with a
# newly setup db).
$anvil->data->{sys}{database}{check_tables} = [];
@@ -5725,7 +5716,7 @@ sub _find_behind_databases
# resync methods.
push @{$anvil->data->{sys}{database}{check_tables}}, $table;
# Preset all tables to have an initial 'change_date' and 'row_count' of 0.
# Preset all tables to have an initial 'modified_date' and 'row_count' of 0.
$anvil->data->{sys}{database}{table}{$table}{last_updated} = 0;
$anvil->data->{sys}{database}{table}{$table}{row_count} = 0;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
@@ -5782,7 +5773,7 @@ sub _find_behind_databases
my $schema = $count ? "history" : "public";
$query = "
SELECT
round(extract(epoch from change_date))
round(extract(epoch from modified_date))
FROM
".$schema.".".$table." ";
if ($host_column)
@@ -5793,7 +5784,7 @@ WHERE
}
$query .= "
ORDER BY
change_date DESC
modified_date DESC
;";
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
uuid => $uuid,
@@ -5811,7 +5802,7 @@ ORDER BY
my $last_updated = $results->[0]->[0];
$last_updated = 0 if not defined $last_updated;
# Record this table's last change_date for later comparison. We'll also
# Record this table's last modified_date for later comparison. We'll also
# record the schema and host column, if found, to save looking the same thing
# up later if we do need a resync.
$anvil->data->{sys}{database}{table}{$table}{id}{$uuid}{last_updated} = $last_updated;
@@ -5880,6 +5871,7 @@ ORDER BY
}
last if $anvil->data->{sys}{database}{resync_needed};
}
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"sys::database::resync_needed" => $anvil->data->{sys}{database}{resync_needed},
}});
@@ -5938,161 +5930,6 @@ sub _mark_database_as_behind
return(0);
}
=head2 _split_query
This method takes a query and examines it to see if a copy is needed for the history schema.
It will return two variables; the original query and, if applicable, the query needed to write to the history schema. A UUID will be generated as needed for the 'change_uuid' columns.
Parameters;
=head3 query (required)
This is the query to process.
=cut
sub _split_query
{
my $self = shift;
my $parameter = shift;
my $anvil = $self->parent;
my $debug = defined $parameter->{debug} ? $parameter->{debug} : 3;
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, key => "log_0125", variables => { method => "Database->_test_access()" }});
my $query = $parameter->{query} ? $parameter->{query} : "";
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { query => $query }});
# Flatten the query to make it easier to parse
my $public_query = "";
my $history_query = "";
# Find out if we're doing an INSERT or UPDATE, which schema we're writing to, and if we have or need
# to inject the change_uuid.
my $parser = SQL::Parser->new();
my $schema = "public";
my $table = "";
my $command = "";
my $change_date = "";
my $change_uuid = "";
my $success = $parser->parse($query);
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { success => $success }});
if ($success)
{
#print Dumper $parser->structure;
$table = $parser->structure->{org_table_names}->[0];
$command = $parser->structure->{command};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
table => $table,
command => $command,
}});
if ($table =~ /^(.*?)\.(.*)$/)
{
$table = $1;
$schema = $2;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
table => $table,
schema => $schema,
}});
}
for (my $i = 0; $i < @{$parser->structure->{column_defs}}; $i++)
{
my $column_name = $parser->structure->{column_defs}->[$i]->{value};
my $column_value = $parser->structure->{'values'}->[0]->[$i]->{value};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
column_name => $column_name,
column_value => $column_value,
}});
if ($column_name eq "change_date")
{
$change_date = $column_value;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { change_date => $change_date }});
}
elsif ($column_name eq "change_uuid")
{
$change_uuid = $column_value;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { change_uuid => $change_uuid }});
}
}
}
else
{
print "Failed to parse: [".$query."]\n";
}
# Inject the $change_uuid, if needed.
my $new_query = "";
if ($change_uuid)
{
# Just straight copy the query to new_query
$new_query = $query;
}
else
{
my $column_end_seen = 0;
$change_uuid = $anvil->Get->uuid({debug => $debug});
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { change_uuid => $change_uuid }});
foreach my $line (split /\n/, $query)
{
if ((not $column_end_seen) && ($command eq "INSERT") && ($line =~ /\)/))
{
$column_end_seen = 1;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { ">> line" => $line }});
$line =~ s/\)/, change_uuid = '$change_uuid')/;
$line =~ s/ ,/,/gs;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { "<< line" => $line }});
}
elsif ((not $column_end_seen) && ($command eq "UPDATE") && ($line =~ /WHERE/i))
{
$column_end_seen = 1;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { ">> line" => $line }});
$line =~ s/WHERE/, change_uuid = '$change_uuid' WHERE/gs;
$line =~ s/ ,/,/gs;
$line =~ s/ WHERE $/ \nWHERE /;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { "<< line" => $line }});
}
$new_query .= $line."\n";
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { new_query => $new_query }});
}
$new_query =~ s/\n$//gs;
if ($new_query =~ /\n, change_uuid/gs)
{
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { ">> new_query" => $new_query }});
$new_query =~ s/\n, change_uuid/, \n change_uuid/gs;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { "<< new_query" => $new_query }});
}
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { new_query => $new_query }});
}
if ($schema eq "history")
{
# Nothing more to do.
$history_query = $new_query;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { history_query => $history_query }});
}
else
{
# Create a copy and prepend 'history.' to the table name
$public_query = $new_query;
$history_query = $new_query;
$history_query =~ s/ $table/ history.$table/gs;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
public_query => $public_query,
history_query => $history_query,
}});
}
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
query => $query,
public_query => $public_query,
history_query => $history_query,
}});
return($public_query, $history_query);
}
=head2 _test_access
This method takes a database UUID and tests the connection to it using the DBD 'ping' method. If it fails, open references to the database are removed or replaced, then an attempt to reconnect is made.
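As an illustration only (not this module's actual implementation), the general ping-then-reconnect idea looks like this; the hash paths mirror those used elsewhere in this file, and the bare C<< connect() >> call is an assumption:

 # Hypothetical sketch: verify a cached handle with DBI's ping(), and reconnect if it has gone away.
 my $dbh = $anvil->data->{cache}{database_handle}{$uuid};
 if ((not $dbh) or (not $dbh->ping))
 {
     # The old handle is stale; drop it and re-run connect() to rebuild the handles.
     delete $anvil->data->{cache}{database_handle}{$uuid};
     $anvil->Database->connect();
 }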