Merge branch 'main' into dependabot/npm_and_yarn/striker-ui/babel/traverse-7.23.2

Branch: main
Digimer committed via GitHub, 1 year ago
commit f89fdae98e
Files changed (10):
  1. Anvil/Tools/Database.pm (115 lines changed)
  2. scancore-agents/scan-cluster/scan-cluster (19 lines changed)
  3. scancore-agents/scan-cluster/scan-cluster.xml (1 line changed)
  4. share/words.xml (2 lines changed)
  5. striker-ui-api/README.md (33 lines changed)
  6. striker-ui/README.md (52 lines changed)
  7. tools/anvil-configure-host (2 lines changed)
  8. tools/anvil-daemon (10 lines changed)
  9. tools/striker-boot-machine (15 lines changed)
  10. tools/striker-manage-peers (16 lines changed)

@@ -105,6 +105,7 @@ my $THIS_FILE = "Database.pm";
# _add_to_local_config
# _age_out_data
# _archive_table
# _check_for_duplicates
# _find_column
# _find_behind_database
# _mark_database_as_behind
@@ -18150,6 +18151,9 @@ sub resync_databases
# We're done with the table data, clear it.
delete $anvil->data->{sys}{database}{table};
# Search for duplicates from the resync
$anvil->Database->_check_for_duplicates({debug => 2});
# Clear the variable that indicates we need a resync.
$anvil->data->{sys}{database}{resync_needed} = 0;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { 'sys::database::resync_needed' => $anvil->data->{sys}{database}{resync_needed} }});
@@ -19649,6 +19653,117 @@ COPY history.".$table." (";
}
=head2 _check_for_duplicates

This method looks for duplicate entries in the database and clears them, if found.

This method takes no parameters.

=cut
sub _check_for_duplicates
{
my $self = shift;
my $parameter = shift;
my $anvil = $self->parent;
my $debug = defined $parameter->{debug} ? $parameter->{debug} : 3;
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, key => "log_0125", variables => { method => "Database->_check_for_duplicates()" }});
my $query = "
SELECT
variable_uuid,
variable_section,
variable_name,
variable_source_table,
variable_source_uuid,
variable_value,
modified_date
FROM
variables
ORDER BY
modified_date DESC
;";
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { query => $query }});
my $results = $anvil->Database->query({query => $query, source => $THIS_FILE, line => __LINE__});
my $count = @{$results};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
results => $results,
count => $count,
}});
foreach my $row (@{$results})
{
my $variable_uuid = $row->[0];
my $variable_section = $row->[1];
my $variable_name = $row->[2];
my $variable_source_table = $row->[3] ? $row->[3] : "none";
my $variable_source_uuid = $row->[4] ? $row->[4] : "none";
my $variable_value = $row->[5];
my $modified_date = $row->[6];
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 1, list => {
variable_uuid => $variable_uuid,
variable_section => $variable_section,
variable_name => $variable_name,
variable_source_table => $variable_source_table,
variable_source_uuid => $variable_source_uuid,
variable_value => $variable_value,
modified_date => $modified_date,
}});
if (not $variable_source_table)
{
$variable_source_table = "none";
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 1, list => { variable_source_table => $variable_source_table }});
}
if (not $variable_source_uuid)
{
$variable_source_uuid = "none";
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 1, list => { variable_source_uuid => $variable_source_uuid }});
}
if ((not exists $anvil->data->{duplicate_variables}{$variable_section}{$variable_name}{$variable_source_table}{$variable_source_uuid}) &&
(not $anvil->data->{duplicate_variables}{$variable_section}{$variable_name}{$variable_source_table}{$variable_source_uuid}{variable_uuid}))
{
# Save it.
$anvil->data->{duplicate_variables}{$variable_section}{$variable_name}{$variable_source_table}{$variable_source_uuid}{variable_value} = $variable_value;
$anvil->data->{duplicate_variables}{$variable_section}{$variable_name}{$variable_source_table}{$variable_source_uuid}{variable_uuid} = $variable_uuid;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 1, list => {
"duplicate_variables::${variable_section}::${variable_name}::${variable_source_table}::${variable_source_uuid}::variable_value" => $anvil->data->{duplicate_variables}{$variable_section}{$variable_name}{$variable_source_table}{$variable_source_uuid}{variable_value},
"duplicate_variables::${variable_section}::${variable_name}::${variable_source_table}::${variable_source_uuid}::variable_uuid" => $anvil->data->{duplicate_variables}{$variable_section}{$variable_name}{$variable_source_table}{$variable_source_uuid}{variable_uuid},
}});
}
else
{
# Duplicate! This is older, so delete it.
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 1, list => {
"duplicate_variables::${variable_section}::${variable_name}::${variable_source_table}::${variable_source_uuid}::variable_value" => $anvil->data->{duplicate_variables}{$variable_section}{$variable_name}{$variable_source_table}{$variable_source_uuid}{variable_value},
"duplicate_variables::${variable_section}::${variable_name}::${variable_source_table}::${variable_source_uuid}::variable_uuid" => $anvil->data->{duplicate_variables}{$variable_section}{$variable_name}{$variable_source_table}{$variable_source_uuid}{variable_uuid},
}});
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, priority => "alert", key => "warning_0165", variables => {
section => $variable_section,
name => $variable_name,
source_table => $variable_source_table,
source_uuid => $variable_source_uuid,
value => $variable_value,
}});
my $queries = [];
push @{$queries}, "DELETE FROM history.variables WHERE variable_uuid = ".$anvil->Database->quote($variable_uuid).";";
push @{$queries}, "DELETE FROM variables WHERE variable_uuid = ".$anvil->Database->quote($variable_uuid).";";
foreach my $query (@{$queries})
{
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 1, list => { query => $query }});
}
$anvil->Database->write({query => $queries, source => $THIS_FILE, line => __LINE__});
}
}
# Delete the hash.
delete $anvil->data->{duplicate_variables};
return(0);
}
=head2 _find_column

This takes a table name and looks for a column that ends in C<< _host_uuid >> and, if found, stores it in the C<< sys::database::uuid_tables >> array.

@@ -418,7 +418,7 @@ sub check_if_server_failed
my ($anvil, $server) = @_;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 3, list => { server => $server }});
-$anvil->Cluster->parse_crm_mon({debug => 3});
+$anvil->Cluster->parse_crm_mon({debug => 2});
my $failed = exists $anvil->data->{crm_mon}{parsed}{'pacemaker-result'}{resources}{resource}{$server}{variables}{failed} ? $anvil->data->{crm_mon}{parsed}{'pacemaker-result'}{resources}{resource}{$server}{variables}{failed} : 0;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 3, list => { failed => $failed }});
if ($failed eq "true")
@@ -708,15 +708,24 @@ INSERT INTO
$anvil->Database->get_anvils();
foreach my $scan_cluster_node_name (sort {$a cmp $b} keys %{$anvil->data->{cib}{parsed}{data}{node}})
{
-my $scan_cluster_node_host_uuid = $anvil->Get->host_uuid_from_name({host_name => $scan_cluster_node_name});
+my $scan_cluster_node_host_uuid = $anvil->Get->host_uuid_from_name({host_name => $scan_cluster_node_name}) // "";
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 2, list => {
scan_cluster_node_name => $scan_cluster_node_name,
scan_cluster_node_host_uuid => $scan_cluster_node_host_uuid,
}});
if (not $scan_cluster_node_host_uuid)
{
# Something is wrong with this host. Does the hostname match the node name?
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, key => "scan_cluster_alert_0016", variables => { node_name => $scan_cluster_node_name }});
next;
}
my $scan_cluster_node_pacemaker_id = $anvil->data->{cib}{parsed}{data}{node}{$scan_cluster_node_name}{node_state}{pacemaker_id};
my $scan_cluster_node_in_ccm = $anvil->data->{cib}{parsed}{data}{node}{$scan_cluster_node_name}{node_state}{in_ccm};
my $scan_cluster_node_crmd_member = $anvil->data->{cib}{parsed}{data}{node}{$scan_cluster_node_name}{node_state}{crmd};
my $scan_cluster_node_cluster_member = $anvil->data->{cib}{parsed}{data}{node}{$scan_cluster_node_name}{node_state}{'join'};
my $scan_cluster_node_maintenance_mode = $anvil->data->{cib}{parsed}{data}{node}{$scan_cluster_node_name}{node_state}{'maintenance-mode'};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 2, list => {
scan_cluster_node_name => $scan_cluster_node_name,
scan_cluster_node_host_uuid => $scan_cluster_node_host_uuid,
scan_cluster_node_pacemaker_id => $scan_cluster_node_pacemaker_id,
scan_cluster_node_in_ccm => $scan_cluster_node_in_ccm,
scan_cluster_node_crmd_member => $scan_cluster_node_crmd_member,
@@ -1044,7 +1053,7 @@ sub collect_data
my ($anvil) = @_;
# Pick out core cluster details.
-my $problem = $anvil->Cluster->parse_cib({debug => 3});
+my $problem = $anvil->Cluster->parse_cib({debug => 2});
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 2, list => { problem => $problem }});
# If there was a problem, we're not in the cluster.

@@ -40,6 +40,7 @@ In Maintenance Mode: ..... [#!variable!maintenance_mode!#]
<key name="scan_cluster_alert_0013">The server: [#!variable!server!#] was found to be failed in pacemaker, but it was successfully recovered. This does NOT mean the server rebooted, but it may have. Checking the server is advised.</key>
<key name="scan_cluster_alert_0014">The server: [#!variable!server!#] was found to be failed in pacemaker. The attempt to recover it appears to have failed. The server might well still be running ok, checking the server is advised.</key>
<key name="scan_cluster_alert_0015">The server: [#!variable!server!#] had been found to be failed in pacemaker. It's now recovered. This does NOT mean the server rebooted, but it may have. Checking the server is advised.</key>
<key name="scan_cluster_alert_0016">The node name: [#!variable!node_name!#] failed to translate to a host UUID. Does the node name match the host name?</key>
<!-- Log entries -->
<key name="scan_cluster_log_0001">Starting: [#!variable!program!#].</key>

@@ -3179,6 +3179,7 @@ Proceed? [y/N]</key>
<key name="message_0354">This host is now configured to map the network.</key>
<key name="message_0355">This host is already NOT configured to map the network.</key>
<key name="message_0356">This host is no longer configured to map the network.</key>
<key name="message_0357">No hosts with IPMI found, done.</key>
<!-- Translate names (protocols, etc) -->
<key name="name_0001">Normal Password</key> <!-- none in mail-server -->
@@ -3952,6 +3953,7 @@ We will try to proceed anyway.</key>
</key>
<key name="warning_0163"><![CDATA[Failed to read the definition file: [#!variable!file!#] for the server: [#!variable!server_name!#] on the host: [#!variable!host_name!#]. If the host is online, it should update the next time scan-server runs.]]></key>
<key name="warning_0164"><![CDATA[Failed to update the definition file: [#!variable!file!#] for the server: [#!variable!server_name!#] on the host: [#!variable!host_name!#]. If the host is online, it should update the next time scan-server runs.]]></key>
<key name="warning_0165"><![CDATA[A duplicate variable was found! Section: [#!variable!section!#], name: [#!variable!name!#], source table: [#!variable!source_table!#], source_uuid: [#!variable!source_uuid!#], value: [#!variable!value!#]. This is older, so it will be deleted.]]></key>
</language>
<!-- 日本語 -->

@@ -0,0 +1,33 @@
# Anvil system striker web interface API
## About NPM projects
In essence, this module and the striker web interface module share the same management workflows:
* All `npm` commands must be executed at the project root or lower level(s).
* To prepare the workspace, run `npm install`.
* To produce a production build, run `npm run build`.
One major difference is there's no live development mode in this project.
See the striker web interface's [README](../striker-ui/README.md) for more details.
## Run prerequisites
* This API module is targeted at NodeJS version 10, which is the default on CentOS/RHEL 8.
* All executables/files listed in `src/lib/consts/SERVER_PATHS.ts` and their respective dependencies are required.
## Build
Run `npm run build` to produce a minified script at `out/index.js`. The output script can be executed with NodeJS assuming all prerequisites are met.
There's no need to remove the old build prior to a new build because the build process always overwrites the single output file.
`systemd` expects the build to be placed exactly at `/usr/share/striker-ui-api/index.js` on a striker.
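For reference, a manual build-and-place pass on a striker might look like the following. This is only a sketch based on the paths and service name mentioned in this README, not the packaged install procedure:

```bash
# Sketch only: build the API bundle and place it where systemd expects it.
npm install                                              # prepare the workspace
npm run build                                            # writes the minified script to out/index.js
sudo cp out/index.js /usr/share/striker-ui-api/index.js  # path expected by the systemd unit
sudo systemctl restart striker-ui-api                    # pick up the new build
```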
## Logs
At the time of writing, no logging library was added. Logs are either `stdout` or `stderr` without levels. When the API runs as a service, its logs can be viewed with `journalctl --unit striker-ui-api`.
Due to the large volume of logs produced, it's highly recommended to note the time of a test and to specify a time frame with `journalctl --since <date parsable time> --until <date parsable time>` to narrow the search.
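For example (the timestamps below are placeholders):

```bash
# Follow the API service logs live.
journalctl --unit striker-ui-api --follow

# Or restrict the output to the window of a specific test run.
journalctl --unit striker-ui-api --since "2023-01-01 09:00:00" --until "2023-01-01 09:30:00"
```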
## Systemd service
The service file of this API module is located in `../units/`. Environment variables can be set with the `Environment=<variable name>=<value>` directive; e.g., to set the main server's port to `80`, use `Environment=PORT=80`.
## Environment variables
Variables can be set to adjust the API's functionality, e.g., listening on a different port. A complete list with explanations is located at `src/lib/consts/ENV.ts`.
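As an example, one way to apply the `PORT` override from the section above is a systemd drop-in. This is a sketch only; check `src/lib/consts/ENV.ts` for the variables the API actually reads:

```bash
# Open (or create) a drop-in override for the service...
sudo systemctl edit striker-ui-api
# ...and add the following in the editor that opens:
#   [Service]
#   Environment=PORT=80
# Then restart the service so the new environment takes effect.
sudo systemctl restart striker-ui-api
```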

@ -1,34 +1,46 @@
This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app). # Anvil system striker web interface
## Getting Started # Notes
First, run the development server: - All NPM commands **must** be executed at the root folder of this UI module, where the `package.json` is located.
- For those unfamiliar with git and/or the web technologies involved, it's highly recommended to clone a fresh repo as a test workspace.
```bash ## Development prerequisites
npm run dev
# or
yarn dev
```
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. - NodeJS LTS is recommended, version >= 14 is required.
- NPM latest is recommended, version >= 6 is required.
You can start editing the page by modifying `pages/index.js`. The page auto-updates as you edit the file. "required" means this project was started with roughly the specified version; earlier version may work.
[API routes](https://nextjs.org/docs/api-routes/introduction) can be accessed on [http://localhost:3000/api/hello](http://localhost:3000/api/hello). This endpoint can be edited in `pages/api/hello.js`. For Fedora, NPM is included as a dependency of NodeJS; running `sudo dnf install nodejs` should be enough.
The `pages/api` directory is mapped to `/api/*`. Files in this directory are treated as [API routes](https://nextjs.org/docs/api-routes/introduction) instead of React pages. ## Installation
## Learn More Run `npm install` to download all production and development dependencies. The NPM packages will be collectively placed into a folder named `node_modules` in the root of this project.
To learn more about Next.js, take a look at the following resources: ## Live development
- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API. Run `npm run dev` to start a local development server that listens on port 3000. Any source code changes after the dev server has started will trigger a quick partial rebuild, the changes will be reflected shortly in the browser.
- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.
You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome! However, it's highly recommended to perform a clear-cache reload, which is usually `CTRL` + `F5`, because changes may not always reflect correctly.
## Deploy on Vercel ## Production build
The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js. Run `rm -rf out && npm run build` to remove the existing build output and generate a new one. It's highly recommended to remove the old build before building a new one because the some of the generated files won't replace the old files, thus old files will remain when the whole output directory gets committed/copied.
Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details. The build is expected to be placed into the `/var/www/html/` directory on a striker.
At the time of writing, the build is committed to the repository to keep the whole project's building process offline. NPM requires network to fetch dependencies before building. Ideally, the build shouldn't be included, but we cannot remove the it from repo until there's a reliable way to separate the download and build tasks.
## Logs
At the time of writing, no logging library has been added due to other priorities. The recommended debug logging is to temporarily add any appropriate `console` functions to suspicious areas, i.e. before and after the location where an exception was thrown.
## Test with striker API
Most of the API requires authentication to access. At the time of writing, the striker API can only produce cookies with its domain (including port) after successfully authenticating a user. Therefore, the striker UI must be accessed from the same location to allow the browser to read the session cookies.
There are 2 tested methods to achive same-domain:
1. Make changes to the source and produce a **production** build. Copy the new build to the striker and access the UI by connecting to the striker.
2. Install a proxy/load balance server, forward `<domain>/` (root) to the server hosting the web UI, and forward `<domain>/api/` to the API. When accessing the UI, the browser only accesses the proxy and will consider the two locations to be under the same domain. thus it will see the cookies produced by authentication. A recommended server with easy-to-understand configuration is `nginx`; it's available via `dnf`.
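As an illustration of option 2, a minimal `nginx` server block might look like this. The port numbers are assumptions for a local test setup (3000 is the UI dev server mentioned above; the API and listen ports are placeholders), not the project's shipped configuration:

```nginx
# Sketch: put the web UI and the striker API behind one origin so the browser
# shares the API's session cookies with the UI.
server {
    listen 80;
    server_name striker.example.com;

    # Requests under /api/ go to the striker API (placeholder port).
    location /api/ {
        proxy_pass http://127.0.0.1:8080;
    }

    # Everything else goes to the web UI (e.g. the dev server on port 3000).
    location / {
        proxy_pass http://127.0.0.1:3000;
    }
}
```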

@@ -50,7 +50,7 @@ if (($< != 0) && ($> != 0))
}
# Connect
-$anvil->Database->connect();
+$anvil->Database->connect({debug => 2, check_for_resync => 1});
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, 'print' => 1, key => "message_0031"});
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 2, 'print' => 1, key => "log_0132"});
if (not $anvil->data->{sys}{database}{connections})

@@ -580,6 +580,16 @@ sub handle_periodic_tasks
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 2, list => { host_type => $host_type }});
if ($host_type eq "striker")
{
# Look for duplicates if we're the primary DB.
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 2, list => {
"sys::database::primary_db" => $anvil->data->{sys}{database}{primary_db},
"Get->host_uuid" => $anvil->Get->host_uuid,
}});
if ($anvil->Get->host_uuid eq $anvil->data->{sys}{database}{primary_db})
{
$anvil->Database->_check_for_duplicates({debug => 2});
}
# This can take a while, but it's been optimized to minimize how long it takes to
# run. To be safe, we'll still background it.
my $shell_call = $anvil->data->{path}{exe}{'striker-get-screenshots'}.$anvil->Log->switches;

@@ -151,11 +151,16 @@ sub find_boot_method
}
my $host_count = @{$hosts};
-my $steps = int((80 / $host_count) / 3);
-$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 2, list => {
-host_count => $host_count,
-steps => $steps,
-}});
+$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 2, list => { host_count => $host_count }});
+if (not $host_count)
+{
+$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, 'print' => 1, level => 1, key => "message_0357"});
+$anvil->Job->update_progress({progress => 100, message => "message_0357"});
+$anvil->nice_exit({exit_code => 0});
+}
+my $steps = int((80 / $host_count) / 3);
+$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 2, list => { steps => $steps }});
$anvil->data->{sys}{progress} = 5;
foreach my $host_name (sort {$a cmp $b} @{$hosts})

@@ -60,7 +60,11 @@ if (($< != 0) && ($> != 0))
}
# We'll try to connect in case we're adding additional peers.
-$anvil->Database->connect();
+$anvil->data->{sys}{database}{resync_needed} = 1;
+$anvil->Database->connect({
+debug => 2,
+check_for_resync => 1,
+});
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 2, secure => 0, key => "log_0132"});
# Am I adding, editing or deleting?
@@ -537,7 +541,8 @@ sub process_entry
# Re-read the config.
$anvil->refresh();
-# Connect, and configure, if needed.
+# Flag a resync, connect, and configure, if needed.
+$anvil->data->{sys}{database}{resync_needed} = 1;
$anvil->Database->connect({
debug => 3,
check_for_resync => 1,
@@ -621,7 +626,12 @@ sub process_entry
sleep 1;
-$anvil->Database->connect({check_for_resync => 1, db_uuid => $host_uuid});
+$anvil->data->{sys}{database}{resync_needed} = 1;
+$anvil->Database->connect({
+debug => 2,
+check_for_resync => 1,
+db_uuid => $host_uuid,
+});
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 2, secure => 0, key => "log_0132"});
}
}
