* Got server migration working using ocf:alteeve:server in a test environment!

* Converted most 'eval { }' calls to localize $@ and test the eval's return value, instead of checking whether $@ was set. (A short sketch of the new pattern follows below.)
* Converted all 'local' hash references to key on the local machine's short host name instead, as the new standard.
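For reference, a minimal self-contained sketch of the eval conversion; parse_something() is a hypothetical stand-in for the real calls being wrapped (XMLin, JSON decode, DBI->connect and so on), and the new check assumes the wrapped call returns a true value on success:

use strict;
use warnings;

# Hypothetical stand-in for the real parsers/connectors being wrapped.
sub parse_something
{
	my ($input) = @_;
	die "bad input\n" if not $input;
	return { ok => 1 };
}

my $input  = "";
my $result = "";

# Old pattern: rely on $@ after the eval.
eval { $result = parse_something($input); };
if ($@)
{
	chomp $@;
	warn "old-style check caught: [$@]\n";
}

# New pattern: localize $@ so it can't leak to callers, and test the eval's return value.
{
	local $@;
	my $test = eval { $result = parse_something($input); };
	if (not $test)
	{
		chomp $@;
		warn "new-style check caught: [$@]\n";
	}
}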

Signed-off-by: Digimer <digimer@alteeve.ca>
main
Digimer 4 years ago
parent 30f2b3fa8e
commit 1498e1b53c
  1. Anvil/Tools/Cluster.pm (1)
  2. Anvil/Tools/DRBD.pm (6)
  3. Anvil/Tools/Database.pm (5)
  4. Anvil/Tools/Get.pm (6)
  5. Anvil/Tools/Server.pm (8)
  6. Anvil/Tools/Striker.pm (9)
  7. Anvil/Tools/System.pm (16)
  8. Anvil/Tools/Words.pm (4)
  9. cgi-bin/striker (11)
  10. ocf/alteeve/server (9)
  11. share/words.xml (3)
  12. tools/anvil-join-anvil (10)
  13. tools/fence_pacemaker (7)
  14. tools/test.pl (180)

@@ -310,6 +310,7 @@ sub parse_cib
}
else
{
local $@;
my $dom = eval { XML::LibXML->load_xml(string => $cib_data); };
if ($@)
{
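For context, a standalone sketch of the XML::LibXML call wrapped above; $cib_data here is a stand-in for the real CIB dump, and load_xml() dies on a parse error, which is what the eval plus the localized $@ catches:

use strict;
use warnings;
use XML::LibXML;

my $cib_data = '<cib><configuration/></cib>';   # stand-in for the real CIB XML

local $@;
my $dom = eval { XML::LibXML->load_xml(string => $cib_data); };
if ($@)
{
	chomp $@;
	warn "Failed to parse the CIB XML: [$@]\n";
}
else
{
	# $dom is an XML::LibXML::Document; the real code walks it from here.
	print "Parsed root element: [".$dom->documentElement->nodeName."]\n";
}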

@@ -8,6 +8,7 @@ use warnings;
use Data::Dumper;
use Scalar::Util qw(weaken isweak);
use Text::Diff;
use JSON;
our $VERSION = "3.0.0";
my $THIS_FILE = "DRBD.pm";
@@ -311,10 +312,11 @@ sub get_devices
delete $anvil->data->{drbd}{config}{$host};
}
local $@;
my $xml = XML::Simple->new();
my $dump_xml = "";
eval { $dump_xml = $xml->XMLin($output, KeyAttr => {}, ForceArray => 1) };
if ($@)
my $test = eval { $dump_xml = $xml->XMLin($output, KeyAttr => {}, ForceArray => 1) };
if (not $test)
{
chomp $@;
my $error = "[ Error ] - There was a problem parsing: [$output]. The error was:\n";
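As an aside, a self-contained version of the XML::Simple call used here, with a made-up stand-in for the DRBD configuration XML that the real $output holds; XMLin() dies on malformed XML, which the eval catches:

use strict;
use warnings;
use XML::Simple;

# Made-up stand-in for the DRBD XML held in $output.
my $output = '<config><resource name="srv07-el6"><host name="mk-a02n01"/></resource></config>';

local $@;
my $xml      = XML::Simple->new();
my $dump_xml = "";
my $test     = eval { $dump_xml = $xml->XMLin($output, KeyAttr => {}, ForceArray => 1); };
if (not $test)
{
	chomp $@;
	warn "There was a problem parsing the XML: [$@]\n";
}
else
{
	# With ForceArray => 1, every child element comes back as an array reference.
	print "First resource name: [".$dump_xml->{resource}->[0]->{name}."]\n";
}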

@@ -1027,12 +1027,13 @@ sub connect
db_connect_string => $db_connect_string,
user => $user,
}});
eval { $dbh = DBI->connect($db_connect_string, $user, $password, {
local $@;
my $test = eval { $dbh = DBI->connect($db_connect_string, $user, $password, {
RaiseError => 1,
AutoCommit => 1,
pg_enable_utf8 => 1
}); };
if ($@)
if (not $test)
{
# Something went wrong...
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 1, priority => "alert", key => "log_0064", variables => {
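For reference, a minimal standalone version of this connect-and-test pattern; the DSN, database name and credentials below are placeholders for illustration, not the values Anvil! actually uses. With RaiseError => 1, a failed connect dies, so the eval plus the return-value test catches it:

use strict;
use warnings;
use DBI;

# Placeholder connection details, for illustration only.
my $db_connect_string = "dbi:Pg:dbname=anvil;host=localhost;port=5432";
my $user              = "admin";
my $password          = "secret";

local $@;
my $dbh  = "";
my $test = eval { $dbh = DBI->connect($db_connect_string, $user, $password, {
	RaiseError     => 1,
	AutoCommit     => 1,
	pg_enable_utf8 => 1,
}); };
if (not $test)
{
	# Something went wrong; the reason is in $@.
	chomp $@;
	warn "Failed to connect to: [$db_connect_string] as: [$user]. The error was: [$@]\n";
}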

@@ -10,6 +10,7 @@ use Data::Dumper;
use Encode;
use UUID::Tiny qw(:std);
use Net::Netmask;
use JSON;
our $VERSION = "3.0.0";
my $THIS_FILE = "Get.pm";
@@ -272,10 +273,11 @@ sub bridges
delete $anvil->data->{$host}{network}{bridges};
};
local $@;
my $bridge_data = "";
my $json = JSON->new->allow_nonref;
eval { $bridge_data = $json->decode($output); };
if ($@)
my $test = eval { $bridge_data = $json->decode($output); };
if (not $test)
{
# JSON parse failed.
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 0, key => "error_0140", variables => {
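As an illustration, a standalone version of this JSON decode, with a made-up stand-in for the bridge data the real $output holds; decode() dies on invalid JSON, which the eval catches:

use strict;
use warnings;
use JSON;

# Made-up stand-in for the JSON describing the host's bridges.
my $output = '[{"ifname":"ifn1_bridge1","master":"ifn1_bridge1"}]';

local $@;
my $bridge_data = "";
my $json        = JSON->new->allow_nonref;
my $test        = eval { $bridge_data = $json->decode($output); };
if (not $test)
{
	# JSON parse failed.
	chomp $@;
	warn "Failed to parse the bridge JSON: [$@]\n";
}
else
{
	print "First bridge: [".$bridge_data->[0]->{ifname}."]\n";
}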

@@ -650,6 +650,8 @@ sub migrate
});
}
# This logs the path down to the resources under the servers. It helps in the next step, which enables
# dual-primary.
foreach my $source (sort {$a cmp $b} keys %{$anvil->data->{server}})
{
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { source => $source }});
@@ -662,7 +664,6 @@ sub migrate
}
}
}
die;
# Enable dual-primary for any resources we know about for this server.
foreach my $resource (sort {$a cmp $b} keys %{$anvil->data->{server}{$source}{$server}{resource}})
@@ -996,10 +997,11 @@ sub _parse_definition
return(1);
}
local $@;
my $xml = XML::Simple->new();
my $server_xml = "";
eval { $server_xml = $xml->XMLin($definition, KeyAttr => {}, ForceArray => 1) };
if ($@)
my $test = eval { $server_xml = $xml->XMLin($definition, KeyAttr => {}, ForceArray => 1) };
if (not $test)
{
chomp $@;
my $error = "[ Error ] - There was a problem parsing: [$definition]. The error was:\n";

@@ -265,6 +265,10 @@ sub get_fence_data
xml_body => $xml_body,
eval_error => $@,
}});
# Clear the error so it doesn't propagate out to a future 'die' and confuse things.
$@ = '';
return(1);
}
}
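The comment above points at a real Perl gotcha: $@ keeps its last value until the next eval, so a stale error can mislead later checks. A small self-contained demonstration, unrelated to the Anvil! code itself:

use strict;
use warnings;

eval { die "parse failed\n"; };
chomp(my $first_error = $@);
print "handled here, but \$\@ still holds: [$first_error]\n";

# Much later, unrelated code that only looks at $@ sees the stale error...
if ($@)
{
	print "a later check is misled by the stale \$\@: [$first_error]\n";
}

# ...so either clear it, or wrap the earlier eval in a 'local $@' scope.
$@ = '';
print "after clearing, \$\@ is empty: [$@]\n";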
@@ -899,10 +903,11 @@ WHERE
$anvil->Database->get_fences({debug => $debug});
# Parse the XML.
local $@;
my $parsed_xml = "";
my $xml = XML::Simple->new();
eval { $parsed_xml = $xml->XMLin($manifest_xml, KeyAttr => { key => 'name' }, ForceArray => []) };
if ($@)
my $test = eval { $parsed_xml = $xml->XMLin($manifest_xml, KeyAttr => { key => 'name' }, ForceArray => []) };
if (not $test)
{
chomp $@;
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, 'print' => 1, level => 0, priority => "err", key => "error_0111", variables => {

@@ -3054,11 +3054,12 @@ sub manage_firewall
# Read the XML to see what services are opened already and translate those into port numbers and
# protocols.
local $@;
my $open_services = [];
my $xml = XML::Simple->new();
my $body = "";
eval { $body = $xml->XMLin($zone_file, KeyAttr => { language => 'name', key => 'name' }, ForceArray => [ 'service' ]) };
if ($@)
my $test = eval { $body = $xml->XMLin($zone_file, KeyAttr => { language => 'name', key => 'name' }, ForceArray => [ 'service' ]) };
if (not $test)
{
chomp $@;
my $error = "[ Error ] - There was a problem reading: [$zone_file]. The error was:\n";
@@ -3066,6 +3067,9 @@ sub manage_firewall
$error .= $@."\n";
$error .= "===========================================================\n";
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 0, priority => "err", raw => $error});
# Clear the error so it doesn't propagate out to a future 'die' and confuse things.
$@ = '';
}
else
{
@@ -4473,10 +4477,11 @@ sub _load_specific_firewalld_zone
return("!!error!!");
}
local $@;
my $xml = XML::Simple->new();
my $body = "";
eval { $body = $xml->XMLin($full_path, KeyAttr => { language => 'name', key => 'name' }, ForceArray => [ 'port' ]) };
if ($@)
my $test = eval { $body = $xml->XMLin($full_path, KeyAttr => { language => 'name', key => 'name' }, ForceArray => [ 'port' ]) };
if (not $test)
{
chomp $@;
my $error = "[ Error ] - There was a problem reading: [$full_path]. The error was:\n";
@@ -4484,6 +4489,9 @@ sub _load_specific_firewalld_zone
$error .= $@."\n";
$error .= "===========================================================\n";
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 0, priority => "err", raw => $error});
# Clear the error so it doesn't propagate out to a future 'die' and confuse things.
$@ = '';
}
else
{

@@ -545,6 +545,7 @@ sub read
}
# Read the file with XML::Simple
local $@;
my $xml = XML::Simple->new();
eval { $anvil->data->{words}{$file} = $xml->XMLin($file, KeyAttr => { language => 'name', key => 'name' }, ForceArray => [ 'language', 'key' ]) };
if ($@)
@@ -555,8 +556,7 @@ sub read
$error .= $@."\n";
$error .= "===========================================================\n";
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => 0, priority => "err", raw => $error});
$return_code = 4;
die;
$anvil->nice_exit({exit_code => 4});
}
else
{
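The switch from a bare 'die' to nice_exit() matters because a controlled exit can return a documented exit code instead of dying mid-function. A rough sketch of the general idea only; this is not the actual Anvil::Tools nice_exit() implementation:

use strict;
use warnings;

# Hypothetical helper: clean up what we can, then exit with the given code.
sub graceful_exit
{
	my ($exit_code, $dbh) = @_;
	$exit_code = 1 if not defined $exit_code;
	# Close the database handle, if we were given one.
	$dbh->disconnect if $dbh;
	exit($exit_code);
}

# On an unrecoverable error, exit with a known code instead of calling 'die'.
graceful_exit(4);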

@@ -8073,11 +8073,12 @@ sub get_network_details
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 3, list => { file => $file }});
# Parse...
local $@;
my $xml = XML::Simple->new();
my $data = "";
my $network = "";
eval { $data = $xml->XMLin($file, KeyAttr => { interface => 'name', key => 'name', ip => 'address' }, ForceArray => [ 'interface', 'key' ] ) };
if ($@)
my $test = eval { $data = $xml->XMLin($file, KeyAttr => { interface => 'name', key => 'name', ip => 'address' }, ForceArray => [ 'interface', 'key' ] ) };
if (not $test)
{
chomp $@;
my $error = "[ Error ] - There was a problem reading: [$file]. The error was:\n";
@@ -8183,11 +8184,13 @@ sub get_network_details_form
my $file = $anvil->data->{path}{directories}{html}."/status/network.xml";
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 3, list => { file => $file }});
local $@;
my $xml = XML::Simple->new();
my $data = "";
my $network = "";
eval { $data = $xml->XMLin($file, KeyAttr => { interface => 'name', key => 'name' }, ForceArray => [ 'interface', 'key' ]) };
if ($@)
my $test = eval { $data = $xml->XMLin($file, KeyAttr => { interface => 'name', key => 'name' }, ForceArray => [ 'interface', 'key' ]) };
if (not $test)
{
chomp $@;
my $error = "[ Error ] - There was a problem reading: [$file]. The error was:\n";

@@ -731,9 +731,10 @@ sub start_drbd_resource
{
my ($anvil) = @_;
my $server = $anvil->data->{environment}{OCF_RESKEY_name};
my $host = $anvil->_short_host_name;
my $peer = $anvil->data->{drbd}{config}{$host}{peer};
my $local_host = $anvil->_short_host_name();
my $server = $anvil->data->{environment}{OCF_RESKEY_name};
my $host = $anvil->_short_host_name;
my $peer = $anvil->data->{drbd}{config}{$host}{peer};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 2, list => {
server => $server,
host => $host,
@@ -1177,6 +1178,7 @@ sub migrate_server
# missing.
# If we're given 'migrate_from', we're pulling the server towards us, so we can check both bridges
# and storage.
my $local_host = $anvil->_short_host_name();
my $server = $anvil->data->{environment}{OCF_RESKEY_name};
my $source = $anvil->data->{environment}{OCF_RESKEY_CRM_meta_migrate_source};
my $target = $anvil->data->{environment}{OCF_RESKEY_CRM_meta_migrate_target};
@@ -1693,7 +1695,6 @@ sub validate_name
{
my ($anvil) = @_;
my $local_host = $anvil->_short_host_name();
my $local_host = $anvil->_short_host_name();
my $server = $anvil->data->{environment}{OCF_RESKEY_name};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => 3, list => {
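For the second point in the commit message (keying per-host data by the local short host name rather than a literal 'local' key), a rough standalone illustration; _short_host_name() is an Anvil! method, so core Sys::Hostname stands in for it here:

use strict;
use warnings;
use Sys::Hostname;

# Derive a short host name, roughly what $anvil->_short_host_name() provides.
my $local_host =  hostname();
   $local_host =~ s/\..*$//;

my $data = {};

# Old standard: a literal 'local' key.
$data->{drbd}{config}{'local'}{peer} = "mk-a02n02";

# New standard: key the same data by the machine's short host name, which avoids
# ambiguity when data for more than one host lands in the same structure.
$data->{drbd}{config}{$local_host}{peer} = "mk-a02n02";

print "Peer for: [$local_host] is: [".$data->{drbd}{config}{$local_host}{peer}."]\n";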

@@ -348,7 +348,7 @@ Failure! The return code: [#!variable!return_code!#] was received ('0' was expec
<key name="job_0089">Adding NTP (network time protocol) servers, if needed.</key>
<key name="job_0090">Adding the NTP server: [#!variable!server!#].</key>
<key name="job_0091">Restarting the daemon: [#!variable!daemon!#].</key>
<key name="job_0092"><![CDATA[[ Error ] - The Job: [#!variabe!job-uuid!#] appears to have passed malformed data. The raw data was: [#!variable!raw!#]. Expected 'as_machine=<host_type>,manifest_uuid=<manifest_uuid>,anvil_uuid=<anvil_uuid>'. Either the parse failed, or the data was somehow invalid.]]></key>
<key name="job_0092"><![CDATA[[ Error ] - The Job: [#!variable!job-uuid!#] appears to have passed malformed data. The raw data was: [#!variable!raw!#]. Expected 'as_machine=<host_type>,manifest_uuid=<manifest_uuid>,anvil_uuid=<anvil_uuid>'. Either the parse failed, or the data was somehow invalid.]]></key>
<key name="job_0093">Updated the password for the: [#!variable!user!#] user.</key>
<key name="job_0094">Enabled and started the daemon: [#!variable!daemon!#].</key>
<key name="job_0095">Disable and stop the daemon: [#!variable!daemon!#].</key>
@@ -386,6 +386,7 @@ Failure! The return code: [#!variable!return_code!#] was received ('0' was expec
<key name="job_0127">Update completed successfully.</key>
<key name="job_0128">Update not required, nothing changed.</key>
<key name="job_0129">Completed joining the #!string!brand_0002!#.</key>
<key name="job_0130">No job was found that needs to be run.</key>
<!-- Log entries -->
<key name="log_0001">Starting: [#!variable!program!#].</key>
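The '#!variable!...!#' and '#!string!...!#' markers in these strings are substitution placeholders. A rough illustration of how such a template can be filled in; this is not the actual Anvil::Tools::Words code, just the general idea:

use strict;
use warnings;

my %variables = ('job-uuid' => 'abc-123', raw => 'as_machine=node');
my $template  = "[ Error ] - The Job: [#!variable!job-uuid!#] appears to have passed malformed data. The raw data was: [#!variable!raw!#].";

(my $string = $template) =~ s/#!variable!(.+?)!#/exists $variables{$1} ? $variables{$1} : "!!undefined:$1!!"/ge;
print $string, "\n";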

@@ -10,6 +10,7 @@
# 4 = Failed to reconnect to any database after the network was reconfigured
# 5 = Problem parsing job data or loading manifest or anvil data using job data.
# 6 = A pcs call failed.
# 7 = No job was found to run.
#
# TODO:
# -
@@ -1830,6 +1831,13 @@ sub load_job
"jobs::job_uuid" => $anvil->data->{jobs}{job_uuid},
}});
if (not $anvil->data->{jobs}{job_uuid})
{
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, 'print' => 1, level => 0, priority => "err", key => "job_0130"});
sleep 1;
$anvil->nice_exit({exit_code => 7});
}
$anvil->data->{job}{progress} = 0;
update_progress($anvil, $anvil->data->{job}{progress}, "clear");
update_progress($anvil, ($anvil->data->{job}{progress} += 1), "job_0074,!!job-uuid!".$anvil->data->{switches}{'job-uuid'}."!!");
@@ -1851,7 +1859,7 @@ sub load_job
{
# Terminate the job entirely, it's likely an unrecoverable problem.
update_progress($anvil, 100, "job_0092,!!job-uuid!".$anvil->data->{switches}{'job-uuid'}."!!,!!raw!".$anvil->data->{jobs}{job_data}."!!");
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, 'print' => 1, level => 2, key => "job_0092", variables => {
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, 'print' => 1, level => 0, priority => "err", key => "job_0092", variables => {
'job-uuid' => $anvil->data->{switches}{'job-uuid'},
raw => $anvil->data->{jobs}{job_data},
}});
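The job_0092 string above documents the expected job data format, 'as_machine=<host_type>,manifest_uuid=<manifest_uuid>,anvil_uuid=<anvil_uuid>'. A small standalone sketch of parsing that comma-separated key=value form; this is illustrative only, not the tool's actual parser:

use strict;
use warnings;

# The UUIDs below are made-up placeholders.
my $job_data = "as_machine=node,manifest_uuid=11111111-1111-1111-1111-111111111111,anvil_uuid=22222222-2222-2222-2222-222222222222";

my %values;
foreach my $pair (split /,/, $job_data)
{
	my ($key, $value) = split /=/, $pair, 2;
	$values{$key} = $value;
}

# Anything missing means the data was malformed, which is what job_0092 reports.
foreach my $required ('as_machine', 'manifest_uuid', 'anvil_uuid')
{
	die "Malformed job data, missing: [$required]\n" if not defined $values{$required};
}
print "Requested machine type: [".$values{as_machine}."]\n";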

@@ -412,9 +412,10 @@ sub read_cib
if (($xml_opened) && ($xml_closed))
{
# We're good
my $xml = XML::Simple->new();
eval { $body = $xml->XMLin($cib, KeyAttr => { language => 'name', key => 'name' }, ForceArray => [ 'id' ]) };
if ($@)
local $@;
my $xml = XML::Simple->new();
my $test = eval { $body = $xml->XMLin($cib, KeyAttr => { language => 'name', key => 'name' }, ForceArray => [ 'id' ]) };
if (not $test)
{
chomp $@;
my $error = "[ Error ] - There was a problem parsing: [$cib]. The error was:\n";

@@ -29,181 +29,7 @@ $anvil->Database->connect();
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, 'print' => 1, level => 2, secure => 0, key => "log_0132"});
$anvil->Get->switches;
my $xml = '<cib crm_feature_set="3.3.0" validate-with="pacemaker-3.2" epoch="322" num_updates="6" admin_epoch="0" cib-last-written="Sun Aug 16 18:24:22 2020" update-origin="mk-a02n01" update-client="cibadmin" update-user="root" have-quorum="1" dc-uuid="2">
<configuration>
<crm_config>
<cluster_property_set id="cib-bootstrap-options">
<nvpair id="cib-bootstrap-options-have-watchdog" name="have-watchdog" value="false"/>
<nvpair id="cib-bootstrap-options-dc-version" name="dc-version" value="2.0.3-5.el8_2.1-4b1f869f0f"/>
<nvpair id="cib-bootstrap-options-cluster-infrastructure" name="cluster-infrastructure" value="corosync"/>
<nvpair id="cib-bootstrap-options-cluster-name" name="cluster-name" value="mk-anvil-02"/>
<nvpair id="cib-bootstrap-options-stonith-max-attempts" name="stonith-max-attempts" value="INFINITY"/>
<nvpair id="cib-bootstrap-options-stonith-enabled" name="stonith-enabled" value="true"/>
<nvpair id="cib-bootstrap-options-maintenance-mode" name="maintenance-mode" value="true"/>
<nvpair id="cib-bootstrap-options-last-lrm-refresh" name="last-lrm-refresh" value="1597445952"/>
</cluster_property_set>
</crm_config>
<nodes>
<node id="1" uname="mk-a02n01"/>
<node id="2" uname="mk-a02n02"/>
</nodes>
<resources>
<primitive class="stonith" id="ipmilan_node1" type="fence_ipmilan">
<instance_attributes id="ipmilan_node1-instance_attributes">
<nvpair id="ipmilan_node1-instance_attributes-ipaddr" name="ipaddr" value="10.201.13.1"/>
<nvpair id="ipmilan_node1-instance_attributes-password" name="password" value="another secret p"/>
<nvpair id="ipmilan_node1-instance_attributes-pcmk_host_list" name="pcmk_host_list" value="mk-a02n01"/>
<nvpair id="ipmilan_node1-instance_attributes-username" name="username" value="admin"/>
</instance_attributes>
<operations>
<op id="ipmilan_node1-monitor-interval-60" interval="60" name="monitor"/>
</operations>
</primitive>
<primitive class="stonith" id="apc_snmp_node1_mk-pdu01" type="fence_apc_snmp">
<instance_attributes id="apc_snmp_node1_mk-pdu01-instance_attributes">
<nvpair id="apc_snmp_node1_mk-pdu01-instance_attributes-ip" name="ip" value="10.201.2.3"/>
<nvpair id="apc_snmp_node1_mk-pdu01-instance_attributes-pcmk_host_list" name="pcmk_host_list" value="mk-a02n01"/>
<nvpair id="apc_snmp_node1_mk-pdu01-instance_attributes-pcmk_off_action" name="pcmk_off_action" value="reboot"/>
<nvpair id="apc_snmp_node1_mk-pdu01-instance_attributes-port" name="port" value="3"/>
</instance_attributes>
<operations>
<op id="apc_snmp_node1_mk-pdu01-monitor-interval-60" interval="60" name="monitor"/>
</operations>
</primitive>
<primitive class="stonith" id="apc_snmp_node1_mk-pdu02" type="fence_apc_snmp">
<instance_attributes id="apc_snmp_node1_mk-pdu02-instance_attributes">
<nvpair id="apc_snmp_node1_mk-pdu02-instance_attributes-ip" name="ip" value="10.201.2.4"/>
<nvpair id="apc_snmp_node1_mk-pdu02-instance_attributes-pcmk_host_list" name="pcmk_host_list" value="mk-a02n01"/>
<nvpair id="apc_snmp_node1_mk-pdu02-instance_attributes-pcmk_off_action" name="pcmk_off_action" value="reboot"/>
<nvpair id="apc_snmp_node1_mk-pdu02-instance_attributes-port" name="port" value="3"/>
</instance_attributes>
<operations>
<op id="apc_snmp_node1_mk-pdu02-monitor-interval-60" interval="60" name="monitor"/>
</operations>
</primitive>
<primitive class="stonith" id="ipmilan_node2" type="fence_ipmilan">
<instance_attributes id="ipmilan_node2-instance_attributes">
<nvpair id="ipmilan_node2-instance_attributes-ipaddr" name="ipaddr" value="10.201.13.2"/>
<nvpair id="ipmilan_node2-instance_attributes-password" name="password" value="another secret p"/>
<nvpair id="ipmilan_node2-instance_attributes-pcmk_host_list" name="pcmk_host_list" value="mk-a02n02"/>
<nvpair id="ipmilan_node2-instance_attributes-username" name="username" value="admin"/>
</instance_attributes>
<operations>
<op id="ipmilan_node2-monitor-interval-60" interval="60" name="monitor"/>
</operations>
</primitive>
<primitive class="stonith" id="apc_snmp_node2_mk-pdu01" type="fence_apc_snmp">
<instance_attributes id="apc_snmp_node2_mk-pdu01-instance_attributes">
<nvpair id="apc_snmp_node2_mk-pdu01-instance_attributes-ip" name="ip" value="10.201.2.3"/>
<nvpair id="apc_snmp_node2_mk-pdu01-instance_attributes-pcmk_host_list" name="pcmk_host_list" value="mk-a02n02"/>
<nvpair id="apc_snmp_node2_mk-pdu01-instance_attributes-pcmk_off_action" name="pcmk_off_action" value="reboot"/>
<nvpair id="apc_snmp_node2_mk-pdu01-instance_attributes-port" name="port" value="4"/>
</instance_attributes>
<operations>
<op id="apc_snmp_node2_mk-pdu01-monitor-interval-60" interval="60" name="monitor"/>
</operations>
</primitive>
<primitive class="stonith" id="apc_snmp_node2_mk-pdu02" type="fence_apc_snmp">
<instance_attributes id="apc_snmp_node2_mk-pdu02-instance_attributes">
<nvpair id="apc_snmp_node2_mk-pdu02-instance_attributes-ip" name="ip" value="10.201.2.4"/>
<nvpair id="apc_snmp_node2_mk-pdu02-instance_attributes-pcmk_host_list" name="pcmk_host_list" value="mk-a02n02"/>
<nvpair id="apc_snmp_node2_mk-pdu02-instance_attributes-pcmk_off_action" name="pcmk_off_action" value="reboot"/>
<nvpair id="apc_snmp_node2_mk-pdu02-instance_attributes-port" name="port" value="4"/>
</instance_attributes>
<operations>
<op id="apc_snmp_node2_mk-pdu02-monitor-interval-60" interval="60" name="monitor"/>
</operations>
</primitive>
<primitive class="ocf" id="srv07-el6" provider="alteeve" type="server">
<instance_attributes id="srv07-el6-instance_attributes">
<nvpair id="srv07-el6-instance_attributes-name" name="name" value="srv07-el6"/>
</instance_attributes>
<meta_attributes id="srv07-el6-meta_attributes">
<nvpair id="srv07-el6-meta_attributes-allow-migrate" name="allow-migrate" value="true"/>
<nvpair id="srv07-el6-meta_attributes-target-role" name="target-role" value="Stopped"/>
</meta_attributes>
<operations>
<op id="srv07-el6-migrate_from-interval-0s" interval="0s" name="migrate_from" timeout="600"/>
<op id="srv07-el6-migrate_to-interval-0s" interval="0s" name="migrate_to" timeout="600"/>
<op id="srv07-el6-monitor-interval-60" interval="60" name="monitor" on-fail="block"/>
<op id="srv07-el6-notify-interval-0s" interval="0s" name="notify" timeout="20"/>
<op id="srv07-el6-start-interval-0s" interval="0s" name="start" timeout="30"/>
<op id="srv07-el6-stop-interval-0s" interval="0s" name="stop" timeout="60"/>
</operations>
</primitive>
</resources>
<constraints>
<rsc_location id="cli-prefer-srv07-el6" node="mk-a02n02" role="Started" rsc="srv07-el6" score="INFINITY"/>
</constraints>
<fencing-topology>
<fencing-level devices="ipmilan_node1" id="fl-mk-a02n01-1" index="1" target="mk-a02n01"/>
<fencing-level devices="apc_snmp_node1_mk-pdu01,apc_snmp_node1_mk-pdu02" id="fl-mk-a02n01-2" index="2" target="mk-a02n01"/>
<fencing-level devices="ipmilan_node2" id="fl-mk-a02n02-1" index="1" target="mk-a02n02"/>
<fencing-level devices="apc_snmp_node2_mk-pdu01,apc_snmp_node2_mk-pdu02" id="fl-mk-a02n02-2" index="2" target="mk-a02n02"/>
</fencing-topology>
</configuration>
<status>
<node_state id="2" uname="mk-a02n02" in_ccm="true" crmd="online" crm-debug-origin="do_update_resource" join="member" expected="member">
<lrm id="2">
<lrm_resources>
<lrm_resource id="ipmilan_node1" type="fence_ipmilan" class="stonith">
<lrm_rsc_op id="ipmilan_node1_last_0" operation_key="ipmilan_node1_monitor_0" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="8:0:7:fd70072a-cb46-4085-89db-f7c0073f77ce" transition-magic="0:7;8:0:7:fd70072a-cb46-4085-89db-f7c0073f77ce" exit-reason="" on_node="mk-a02n02" call-id="5" rc-code="7" op-status="0" interval="0" last-rc-change="1597616557" last-run="1597616557" exec-time="2" queue-time="0" op-digest="230c3c46a7f39ff7a5ff7f1b8aa9f17d" op-secure-params=" password passwd " op-secure-digest="a8bb97c4c1cae8f90e445a0ce85ecc19"/>
</lrm_resource>
<lrm_resource id="apc_snmp_node1_mk-pdu01" type="fence_apc_snmp" class="stonith">
<lrm_rsc_op id="apc_snmp_node1_mk-pdu01_last_0" operation_key="apc_snmp_node1_mk-pdu01_start_0" operation="start" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="17:0:0:fd70072a-cb46-4085-89db-f7c0073f77ce" transition-magic="0:0;17:0:0:fd70072a-cb46-4085-89db-f7c0073f77ce" exit-reason="" on_node="mk-a02n02" call-id="30" rc-code="0" op-status="0" interval="0" last-rc-change="1597616558" last-run="1597616558" exec-time="598" queue-time="0" op-digest="6b6191eeb61cd595ab0a26ec9762f8aa" op-secure-params=" password passwd " op-secure-digest="1dc851b0efa605b4ec3f03e3a3ba62f7"/>
</lrm_resource>
<lrm_resource id="apc_snmp_node1_mk-pdu02" type="fence_apc_snmp" class="stonith">
<lrm_rsc_op id="apc_snmp_node1_mk-pdu02_last_0" operation_key="apc_snmp_node1_mk-pdu02_monitor_0" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="10:0:7:fd70072a-cb46-4085-89db-f7c0073f77ce" transition-magic="0:7;10:0:7:fd70072a-cb46-4085-89db-f7c0073f77ce" exit-reason="" on_node="mk-a02n02" call-id="13" rc-code="7" op-status="0" interval="0" last-rc-change="1597616557" last-run="1597616557" exec-time="0" queue-time="0" op-digest="f4b11aca778aa58d81b7fa096bfe3fb4" op-secure-params=" password passwd " op-secure-digest="78517effd4af72191ac2c0b9d8567fcd"/>
</lrm_resource>
<lrm_resource id="ipmilan_node2" type="fence_ipmilan" class="stonith">
<lrm_rsc_op id="ipmilan_node2_last_0" operation_key="ipmilan_node2_start_0" operation="start" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="21:0:0:fd70072a-cb46-4085-89db-f7c0073f77ce" transition-magic="0:0;21:0:0:fd70072a-cb46-4085-89db-f7c0073f77ce" exit-reason="" on_node="mk-a02n02" call-id="31" rc-code="0" op-status="0" interval="0" last-rc-change="1597616558" last-run="1597616558" exec-time="98" queue-time="0" op-digest="e759a456df902485096d4a48725ed81c" op-secure-params=" password passwd " op-secure-digest="47989163387c397e63fa3acdbec0d274"/>
</lrm_resource>
<lrm_resource id="apc_snmp_node2_mk-pdu01" type="fence_apc_snmp" class="stonith">
<lrm_rsc_op id="apc_snmp_node2_mk-pdu01_last_0" operation_key="apc_snmp_node2_mk-pdu01_monitor_0" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="12:0:7:fd70072a-cb46-4085-89db-f7c0073f77ce" transition-magic="0:7;12:0:7:fd70072a-cb46-4085-89db-f7c0073f77ce" exit-reason="" on_node="mk-a02n02" call-id="21" rc-code="7" op-status="0" interval="0" last-rc-change="1597616557" last-run="1597616557" exec-time="0" queue-time="0" op-digest="3d4af69481cb01c8c8f0f8af95940b99" op-secure-params=" password passwd " op-secure-digest="fd2959d25b0a20f6d1bc630f7565fd78"/>
</lrm_resource>
<lrm_resource id="apc_snmp_node2_mk-pdu02" type="fence_apc_snmp" class="stonith">
<lrm_rsc_op id="apc_snmp_node2_mk-pdu02_last_0" operation_key="apc_snmp_node2_mk-pdu02_start_0" operation="start" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="25:0:0:fd70072a-cb46-4085-89db-f7c0073f77ce" transition-magic="0:0;25:0:0:fd70072a-cb46-4085-89db-f7c0073f77ce" exit-reason="" on_node="mk-a02n02" call-id="32" rc-code="0" op-status="0" interval="0" last-rc-change="1597616558" last-run="1597616558" exec-time="591" queue-time="0" op-digest="7787bf20740a07e14145707988b18000" op-secure-params=" password passwd " op-secure-digest="11d1e757682ff46234d9816e06534953"/>
</lrm_resource>
<lrm_resource id="srv07-el6" type="server" class="ocf" provider="alteeve">
<lrm_rsc_op id="srv07-el6_last_0" operation_key="srv07-el6_monitor_0" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="14:0:7:fd70072a-cb46-4085-89db-f7c0073f77ce" transition-magic="0:7;14:0:7:fd70072a-cb46-4085-89db-f7c0073f77ce" exit-reason="" on_node="mk-a02n02" call-id="29" rc-code="7" op-status="0" interval="0" last-rc-change="1597616558" last-run="1597616558" exec-time="536" queue-time="1" op-digest="41dcb3443c331f2fe7ae92962905159f"/>
</lrm_resource>
</lrm_resources>
</lrm>
</node_state>
<node_state id="1" uname="mk-a02n01" in_ccm="true" crmd="online" crm-debug-origin="do_update_resource" join="member" expected="member">
<lrm id="1">
<lrm_resources>
<lrm_resource id="ipmilan_node1" type="fence_ipmilan" class="stonith">
<lrm_rsc_op id="ipmilan_node1_last_0" operation_key="ipmilan_node1_start_0" operation="start" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="15:0:0:fd70072a-cb46-4085-89db-f7c0073f77ce" transition-magic="0:0;15:0:0:fd70072a-cb46-4085-89db-f7c0073f77ce" exit-reason="" on_node="mk-a02n01" call-id="30" rc-code="0" op-status="0" interval="0" last-rc-change="1597616559" last-run="1597616559" exec-time="113" queue-time="0" op-digest="230c3c46a7f39ff7a5ff7f1b8aa9f17d" op-secure-params=" password passwd " op-secure-digest="a8bb97c4c1cae8f90e445a0ce85ecc19"/>
</lrm_resource>
<lrm_resource id="apc_snmp_node1_mk-pdu01" type="fence_apc_snmp" class="stonith">
<lrm_rsc_op id="apc_snmp_node1_mk-pdu01_last_0" operation_key="apc_snmp_node1_mk-pdu01_monitor_0" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="2:0:7:fd70072a-cb46-4085-89db-f7c0073f77ce" transition-magic="0:7;2:0:7:fd70072a-cb46-4085-89db-f7c0073f77ce" exit-reason="" on_node="mk-a02n01" call-id="9" rc-code="7" op-status="0" interval="0" last-rc-change="1597616557" last-run="1597616557" exec-time="0" queue-time="0" op-digest="6b6191eeb61cd595ab0a26ec9762f8aa" op-secure-params=" password passwd " op-secure-digest="1dc851b0efa605b4ec3f03e3a3ba62f7"/>
</lrm_resource>
<lrm_resource id="apc_snmp_node1_mk-pdu02" type="fence_apc_snmp" class="stonith">
<lrm_rsc_op id="apc_snmp_node1_mk-pdu02_last_0" operation_key="apc_snmp_node1_mk-pdu02_start_0" operation="start" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="19:0:0:fd70072a-cb46-4085-89db-f7c0073f77ce" transition-magic="0:0;19:0:0:fd70072a-cb46-4085-89db-f7c0073f77ce" exit-reason="" on_node="mk-a02n01" call-id="31" rc-code="0" op-status="0" interval="0" last-rc-change="1597616559" last-run="1597616559" exec-time="616" queue-time="0" op-digest="f4b11aca778aa58d81b7fa096bfe3fb4" op-secure-params=" password passwd " op-secure-digest="78517effd4af72191ac2c0b9d8567fcd"/>
</lrm_resource>
<lrm_resource id="ipmilan_node2" type="fence_ipmilan" class="stonith">
<lrm_rsc_op id="ipmilan_node2_last_0" operation_key="ipmilan_node2_monitor_0" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="4:0:7:fd70072a-cb46-4085-89db-f7c0073f77ce" transition-magic="0:7;4:0:7:fd70072a-cb46-4085-89db-f7c0073f77ce" exit-reason="" on_node="mk-a02n01" call-id="17" rc-code="7" op-status="0" interval="0" last-rc-change="1597616557" last-run="1597616557" exec-time="0" queue-time="0" op-digest="e759a456df902485096d4a48725ed81c" op-secure-params=" password passwd " op-secure-digest="47989163387c397e63fa3acdbec0d274"/>
</lrm_resource>
<lrm_resource id="apc_snmp_node2_mk-pdu01" type="fence_apc_snmp" class="stonith">
<lrm_rsc_op id="apc_snmp_node2_mk-pdu01_last_0" operation_key="apc_snmp_node2_mk-pdu01_start_0" operation="start" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="23:0:0:fd70072a-cb46-4085-89db-f7c0073f77ce" transition-magic="0:0;23:0:0:fd70072a-cb46-4085-89db-f7c0073f77ce" exit-reason="" on_node="mk-a02n01" call-id="32" rc-code="0" op-status="0" interval="0" last-rc-change="1597616559" last-run="1597616559" exec-time="617" queue-time="0" op-digest="3d4af69481cb01c8c8f0f8af95940b99" op-secure-params=" password passwd " op-secure-digest="fd2959d25b0a20f6d1bc630f7565fd78"/>
</lrm_resource>
<lrm_resource id="apc_snmp_node2_mk-pdu02" type="fence_apc_snmp" class="stonith">
<lrm_rsc_op id="apc_snmp_node2_mk-pdu02_last_0" operation_key="apc_snmp_node2_mk-pdu02_monitor_0" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="6:0:7:fd70072a-cb46-4085-89db-f7c0073f77ce" transition-magic="0:7;6:0:7:fd70072a-cb46-4085-89db-f7c0073f77ce" exit-reason="" on_node="mk-a02n01" call-id="25" rc-code="7" op-status="0" interval="0" last-rc-change="1597616557" last-run="1597616557" exec-time="0" queue-time="0" op-digest="7787bf20740a07e14145707988b18000" op-secure-params=" password passwd " op-secure-digest="11d1e757682ff46234d9816e06534953"/>
</lrm_resource>
<lrm_resource id="srv07-el6" type="server" class="ocf" provider="alteeve">
<lrm_rsc_op id="srv07-el6_last_0" operation_key="srv07-el6_monitor_0" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="7:0:7:fd70072a-cb46-4085-89db-f7c0073f77ce" transition-magic="0:7;7:0:7:fd70072a-cb46-4085-89db-f7c0073f77ce" exit-reason="" on_node="mk-a02n01" call-id="29" rc-code="7" op-status="0" interval="0" last-rc-change="1597616559" last-run="1597616559" exec-time="541" queue-time="0" op-digest="41dcb3443c331f2fe7ae92962905159f"/>
</lrm_resource>
</lrm_resources>
</lrm>
</node_state>
</status>
</cib>
';
$anvil->Get->bridges({ debug => 2 });
my $host = $anvil->_short_host_name();
$anvil->Cluster->parse_cib({
debug => 2,
#cib => $xml,
});
print "Cluster is in maintenance mode? [".$anvil->data->{cib}{parsed}{data}{cluster}{'maintenance-mode'}."]\n";
die;
