* Created Cluster->parse_cib(), which parses pacemaker's CIB (cluster information base) XML. This also switches to XML::LibXML, starting the replacement of XML::Simple. It's far from finished, but it parses out basic node data and fence data (a sketch of the resulting data layout follows the file list below).

Signed-off-by: Digimer <digimer@alteeve.ca>
Branch: main
Author: Digimer
Parent: 597d9413a5
Commit: 62d0a2aa39
393  Anvil/Tools/Cluster.pm
  2  cgi-bin/striker
 20  notes
  1  rpm/SPECS/anvil.spec
 14  share/words.xml
  8  tools/test.pl
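
For orientation, a hedged sketch of the hash layout parse_cib() builds on success; the key formats are taken from the parsing code in the diff below, and the example values come from the sample CIB embedded in the code comments:

# From /cib/configuration/nodes/node:
#   cib::parsed::configuration::nodes::el8-a01n01::id = "1"
# From /cib/configuration/resources/primitive (fence data):
#   cib::parsed::cib::resources::primitive::stonith::virsh_node1::type = "fence_virsh"
#   cib::parsed::cib::resources::primitive::stonith::virsh_node1::instance_attributes::delay::value = "15"
# From /cib/status/node_state (populated once the cluster has been up for a bit):
#   cib::parsed::cib::node_state::el8-a01n01::crmd = "online"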

@@ -5,14 +5,16 @@ package Anvil::Tools::Cluster;
use strict;
use warnings;
use Scalar::Util qw(weaken isweak);
use Data::Dumper;
use XML::Simple qw(:strict);
use XML::LibXML;
our $VERSION = "3.0.0";
my $THIS_FILE = "Cluster.pm";
### Methods;
# get_peer
# parse_cib
=pod
@@ -72,23 +74,398 @@ sub parent
# Public methods #
#############################################################################################################
=head2 get_peer
=head2 parse_cib
This method will return the peer's host name, B<< if >> this host is itself a node in a cluster.
This reads in the CIB XML and parses it. On success, it returns C<< 0 >>. On failure (e.g., pcsd isn't running), it returns C<< 1 >>.
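
A minimal usage sketch (the caller shown here is hypothetical; the only parameter C<< parse_cib >> reads is the optional C<< debug >> level):

 my $problem = $anvil->Cluster->parse_cib({debug => 2});
 if (not $problem)
 {
 	# On success, the parsed data is stored under the 'cib::parsed' hash, e.g.:
 	my $cluster_name = $anvil->data->{cib}{parsed}{configuration}{crm_config}{cluster_property_set}{nvpair}{'cluster-name'}{value};
 }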
=cut
sub get_peer
sub parse_cib
{
my $self = shift;
my $parameter = shift;
my $anvil = $self->parent;
my $debug = defined $parameter->{debug} ? $parameter->{debug} : 3;
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, key => "log_0125", variables => { method => "Database->_test_access()" }});
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, key => "log_0125", variables => { method => "Cluster->parse_cib()" }});
# If we parsed before, delete it.
if (exists $anvil->data->{cib}{parsed})
{
delete $anvil->data->{cib}{parsed};
}
my $problem = 1;
my $shell_call = $anvil->data->{path}{exe}{pcs}." cluster cib";
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { shell_call => $shell_call }});
my $peer_host_name = "";
my ($cib_data, $return_code) = $anvil->System->call({debug => 3, shell_call => $shell_call});
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
cib_data => $cib_data,
return_code => $return_code,
}});
if ($return_code)
{
# Failed to read the CIB.
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, key => "warning_0052"});
}
else
{
my $dom = eval { XML::LibXML->load_xml(string => $cib_data); };
if ($@)
{
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, key => "warning_0053", variables => {
cib => $cib_data,
error => $@,
}});
}
else
{
# Successful parse!
=pod
<cib crm_feature_set="3.3.0" validate-with="pacemaker-3.2" epoch="5" num_updates="0" admin_epoch="0" cib-last-written="Fri Jul 10 19:51:41 2020" update-origin="el8-a01n01" update-client="crmd" update-user="hacluster" have-quorum="1">
<configuration>
<crm_config>
<cluster_property_set id="cib-bootstrap-options">
<nvpair id="cib-bootstrap-options-have-watchdog" name="have-watchdog" value="false"/>
<nvpair id="cib-bootstrap-options-dc-version" name="dc-version" value="2.0.3-5.el8_2.1-4b1f869f0f"/>
<nvpair id="cib-bootstrap-options-cluster-infrastructure" name="cluster-infrastructure" value="corosync"/>
<nvpair id="cib-bootstrap-options-cluster-name" name="cluster-name" value="el8-anvil-01"/>
</cluster_property_set>
</crm_config>
<nodes>
<node id="1" uname="el8-a01n01"/>
<node id="2" uname="el8-a01n02"/>
</nodes>
<resources/>
<constraints/>
</configuration>
<status/>
</cib>
==================
<cib crm_feature_set="3.3.0" validate-with="pacemaker-3.2" epoch="5" num_updates="4" admin_epoch="0" cib-last-written="Fri Jul 10 17:35:48 2020" update-origin="el8-a01n01" update-client="crmd" update-user="hacluster" have-quorum="1" dc-uuid="2">
<configuration>
<crm_config>
<cluster_property_set id="cib-bootstrap-options">
<nvpair id="cib-bootstrap-options-have-watchdog" name="have-watchdog" value="false"/>
<nvpair id="cib-bootstrap-options-dc-version" name="dc-version" value="2.0.3-5.el8_2.1-4b1f869f0f"/>
<nvpair id="cib-bootstrap-options-cluster-infrastructure" name="cluster-infrastructure" value="corosync"/>
<nvpair id="cib-bootstrap-options-cluster-name" name="cluster-name" value="el8-anvil-01"/>
</cluster_property_set>
</crm_config>
<nodes>
<node id="1" uname="el8-a01n01"/>
<node id="2" uname="el8-a01n02"/>
</nodes>
<resources/>
<constraints/>
</configuration>
<status>
<node_state id="2" uname="el8-a01n02" in_ccm="true" crmd="online" crm-debug-origin="do_state_transition" join="member" expected="member">
<lrm id="2">
<lrm_resources/>
</lrm>
</node_state>
<node_state id="1" uname="el8-a01n01" in_ccm="true" crmd="online" crm-debug-origin="do_state_transition" join="member" expected="member">
<lrm id="1">
<lrm_resources/>
</lrm>
</node_state>
</status>
</cib>
================== First fence
<cib crm_feature_set="3.3.0" validate-with="pacemaker-3.2" epoch="6" num_updates="8" admin_epoch="0" cib-last-written="Sat Jul 11 04:49:06 2020" update-origin="el8-a01n01" update-client="cibadmin" update-user="root" have-quorum="1" dc-uuid="1">
<configuration>
<crm_config>
<cluster_property_set id="cib-bootstrap-options">
<nvpair id="cib-bootstrap-options-have-watchdog" name="have-watchdog" value="false"/>
<nvpair id="cib-bootstrap-options-dc-version" name="dc-version" value="2.0.3-5.el8_2.1-4b1f869f0f"/>
<nvpair id="cib-bootstrap-options-cluster-infrastructure" name="cluster-infrastructure" value="corosync"/>
<nvpair id="cib-bootstrap-options-cluster-name" name="cluster-name" value="el8-anvil-01"/>
</cluster_property_set>
</crm_config>
<nodes>
<node id="1" uname="el8-a01n01"/>
<node id="2" uname="el8-a01n02"/>
</nodes>
<resources>
<primitive class="stonith" id="virsh_node1" type="fence_virsh">
<instance_attributes id="virsh_node1-instance_attributes">
<nvpair id="virsh_node1-instance_attributes-delay" name="delay" value="15"/>
<nvpair id="virsh_node1-instance_attributes-ipaddr" name="ipaddr" value="192.168.122.1"/>
<nvpair id="virsh_node1-instance_attributes-login" name="login" value="root"/>
<nvpair id="virsh_node1-instance_attributes-passwd" name="passwd" value="high generous distance"/>
<nvpair id="virsh_node1-instance_attributes-pcmk_host_list" name="pcmk_host_list" value="el8-a01n01"/>
<nvpair id="virsh_node1-instance_attributes-port" name="port" value="el8-a01n01"/>
</instance_attributes>
<operations>
<op id="virsh_node1-monitor-interval-60" interval="60" name="monitor"/>
</operations>
</primitive>
</resources>
<constraints/>
</configuration>
<status>
<node_state id="1" uname="el8-a01n01" in_ccm="true" crmd="online" crm-debug-origin="do_update_resource" join="member" expected="member">
<lrm id="1">
<lrm_resources>
<lrm_resource id="virsh_node1" type="fence_virsh" class="stonith">
<lrm_rsc_op id="virsh_node1_last_0" operation_key="virsh_node1_start_0" operation="start" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="3:4:0:b6d5caa1-2120-49f4-a534-29b724e74161" transition-magic="0:0;3:4:0:b6d5caa1-2120-49f4-a534-29b724e74161" exit-reason="" on_node="el8-a01n01" call-id="6" rc-code="0" op-status="0" interval="0" last-rc-change="1594442946" last-run="1594442946" exec-time="737" queue-time="0" op-digest="608a523c27162c0c4648550326dd1b26" op-secure-params=" password passwd " op-secure-digest="56bdf46bebc74266a3efb03c61e05c7d"/>
<lrm_rsc_op id="virsh_node1_monitor_60000" operation_key="virsh_node1_monitor_60000" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="4:4:0:b6d5caa1-2120-49f4-a534-29b724e74161" transition-magic="0:0;4:4:0:b6d5caa1-2120-49f4-a534-29b724e74161" exit-reason="" on_node="el8-a01n01" call-id="7" rc-code="0" op-status="0" interval="60000" last-rc-change="1594442947" exec-time="614" queue-time="0" op-digest="5be687ff1e141e610106215889894545" op-secure-params=" password passwd " op-secure-digest="56bdf46bebc74266a3efb03c61e05c7d"/>
</lrm_resource>
</lrm_resources>
</lrm>
</node_state>
<node_state id="2" uname="el8-a01n02" in_ccm="true" crmd="online" crm-debug-origin="do_update_resource" join="member" expected="member">
<lrm id="2">
<lrm_resources>
<lrm_resource id="virsh_node1" type="fence_virsh" class="stonith">
<lrm_rsc_op id="virsh_node1_last_0" operation_key="virsh_node1_monitor_0" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="2:4:7:b6d5caa1-2120-49f4-a534-29b724e74161" transition-magic="0:7;2:4:7:b6d5caa1-2120-49f4-a534-29b724e74161" exit-reason="" on_node="el8-a01n02" call-id="5" rc-code="7" op-status="0" interval="0" last-rc-change="1594442946" last-run="1594442946" exec-time="3" queue-time="0" op-digest="608a523c27162c0c4648550326dd1b26" op-secure-params=" password passwd " op-secure-digest="56bdf46bebc74266a3efb03c61e05c7d"/>
</lrm_resource>
</lrm_resources>
</lrm>
</node_state>
</status>
</cib>
================== Second fence
<cib crm_feature_set="3.3.0" validate-with="pacemaker-3.2" epoch="7" num_updates="8" admin_epoch="0" cib-last-written="Sat Jul 11 04:55:41 2020" update-origin="el8-a01n01" update-client="cibadmin" update-user="root" have-quorum="1" dc-uuid="1">
<configuration>
<crm_config>
<cluster_property_set id="cib-bootstrap-options">
<nvpair id="cib-bootstrap-options-have-watchdog" name="have-watchdog" value="false"/>
<nvpair id="cib-bootstrap-options-dc-version" name="dc-version" value="2.0.3-5.el8_2.1-4b1f869f0f"/>
<nvpair id="cib-bootstrap-options-cluster-infrastructure" name="cluster-infrastructure" value="corosync"/>
<nvpair id="cib-bootstrap-options-cluster-name" name="cluster-name" value="el8-anvil-01"/>
</cluster_property_set>
</crm_config>
<nodes>
<node id="1" uname="el8-a01n01"/>
<node id="2" uname="el8-a01n02"/>
</nodes>
<resources>
<primitive class="stonith" id="virsh_node1" type="fence_virsh">
<instance_attributes id="virsh_node1-instance_attributes">
<nvpair id="virsh_node1-instance_attributes-delay" name="delay" value="15"/>
<nvpair id="virsh_node1-instance_attributes-ipaddr" name="ipaddr" value="192.168.122.1"/>
<nvpair id="virsh_node1-instance_attributes-login" name="login" value="root"/>
<nvpair id="virsh_node1-instance_attributes-passwd" name="passwd" value="high generous distance"/>
<nvpair id="virsh_node1-instance_attributes-pcmk_host_list" name="pcmk_host_list" value="el8-a01n01"/>
<nvpair id="virsh_node1-instance_attributes-port" name="port" value="el8-a01n01"/>
</instance_attributes>
<operations>
<op id="virsh_node1-monitor-interval-60" interval="60" name="monitor"/>
</operations>
</primitive>
<primitive class="stonith" id="virsh_node2" type="fence_virsh">
<instance_attributes id="virsh_node2-instance_attributes">
<nvpair id="virsh_node2-instance_attributes-ipaddr" name="ipaddr" value="192.168.122.1"/>
<nvpair id="virsh_node2-instance_attributes-login" name="login" value="root"/>
<nvpair id="virsh_node2-instance_attributes-passwd" name="passwd" value="high generous distance"/>
<nvpair id="virsh_node2-instance_attributes-pcmk_host_list" name="pcmk_host_list" value="el8-a01n02"/>
<nvpair id="virsh_node2-instance_attributes-port" name="port" value="el8-a01n02"/>
</instance_attributes>
<operations>
<op id="virsh_node2-monitor-interval-60" interval="60" name="monitor"/>
</operations>
</primitive>
</resources>
<constraints/>
</configuration>
<status>
<node_state id="1" uname="el8-a01n01" in_ccm="true" crmd="online" crm-debug-origin="do_update_resource" join="member" expected="member">
<lrm id="1">
<lrm_resources>
<lrm_resource id="virsh_node1" type="fence_virsh" class="stonith">
<lrm_rsc_op id="virsh_node1_last_0" operation_key="virsh_node1_start_0" operation="start" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="3:4:0:b6d5caa1-2120-49f4-a534-29b724e74161" transition-magic="0:0;3:4:0:b6d5caa1-2120-49f4-a534-29b724e74161" exit-reason="" on_node="el8-a01n01" call-id="6" rc-code="0" op-status="0" interval="0" last-rc-change="1594442946" last-run="1594442946" exec-time="737" queue-time="0" op-digest="608a523c27162c0c4648550326dd1b26" op-secure-params=" password passwd " op-secure-digest="56bdf46bebc74266a3efb03c61e05c7d"/>
<lrm_rsc_op id="virsh_node1_monitor_60000" operation_key="virsh_node1_monitor_60000" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="4:4:0:b6d5caa1-2120-49f4-a534-29b724e74161" transition-magic="0:0;4:4:0:b6d5caa1-2120-49f4-a534-29b724e74161" exit-reason="" on_node="el8-a01n01" call-id="7" rc-code="0" op-status="0" interval="60000" last-rc-change="1594442947" exec-time="614" queue-time="0" op-digest="5be687ff1e141e610106215889894545" op-secure-params=" password passwd " op-secure-digest="56bdf46bebc74266a3efb03c61e05c7d"/>
</lrm_resource>
<lrm_resource id="virsh_node2" type="fence_virsh" class="stonith">
<lrm_rsc_op id="virsh_node2_last_0" operation_key="virsh_node2_monitor_0" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="2:5:7:b6d5caa1-2120-49f4-a534-29b724e74161" transition-magic="0:7;2:5:7:b6d5caa1-2120-49f4-a534-29b724e74161" exit-reason="" on_node="el8-a01n01" call-id="11" rc-code="7" op-status="0" interval="0" last-rc-change="1594443341" last-run="1594443341" exec-time="0" queue-time="0" op-digest="e545a3390de0c9d2624ef4cac775b9c9" op-secure-params=" password passwd " op-secure-digest="8065dc4867c73abfb780e52db7525148"/>
</lrm_resource>
</lrm_resources>
</lrm>
</node_state>
<node_state id="2" uname="el8-a01n02" in_ccm="true" crmd="online" crm-debug-origin="do_update_resource" join="member" expected="member">
<lrm id="2">
<lrm_resources>
<lrm_resource id="virsh_node1" type="fence_virsh" class="stonith">
<lrm_rsc_op id="virsh_node1_last_0" operation_key="virsh_node1_monitor_0" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="2:4:7:b6d5caa1-2120-49f4-a534-29b724e74161" transition-magic="0:7;2:4:7:b6d5caa1-2120-49f4-a534-29b724e74161" exit-reason="" on_node="el8-a01n02" call-id="5" rc-code="7" op-status="0" interval="0" last-rc-change="1594442946" last-run="1594442946" exec-time="3" queue-time="0" op-digest="608a523c27162c0c4648550326dd1b26" op-secure-params=" password passwd " op-secure-digest="56bdf46bebc74266a3efb03c61e05c7d"/>
</lrm_resource>
<lrm_resource id="virsh_node2" type="fence_virsh" class="stonith">
<lrm_rsc_op id="virsh_node2_last_0" operation_key="virsh_node2_start_0" operation="start" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="6:5:0:b6d5caa1-2120-49f4-a534-29b724e74161" transition-magic="0:0;6:5:0:b6d5caa1-2120-49f4-a534-29b724e74161" exit-reason="" on_node="el8-a01n02" call-id="10" rc-code="0" op-status="0" interval="0" last-rc-change="1594443341" last-run="1594443341" exec-time="992" queue-time="0" op-digest="e545a3390de0c9d2624ef4cac775b9c9" op-secure-params=" password passwd " op-secure-digest="8065dc4867c73abfb780e52db7525148"/>
<lrm_rsc_op id="virsh_node2_monitor_60000" operation_key="virsh_node2_monitor_60000" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="7:5:0:b6d5caa1-2120-49f4-a534-29b724e74161" transition-magic="0:0;7:5:0:b6d5caa1-2120-49f4-a534-29b724e74161" exit-reason="" on_node="el8-a01n02" call-id="11" rc-code="0" op-status="0" interval="60000" last-rc-change="1594443342" exec-time="763" queue-time="1" op-digest="73223e8083cc6447a379293b74bbab9d" op-secure-params=" password passwd " op-secure-digest="8065dc4867c73abfb780e52db7525148"/>
</lrm_resource>
</lrm_resources>
</lrm>
</node_state>
</status>
</cib>
================ Enable stonith
<cib crm_feature_set="3.3.0" validate-with="pacemaker-3.2" epoch="8" num_updates="0" admin_epoch="0" cib-last-written="Sat Jul 11 04:57:22 2020" update-origin="el8-a01n01" update-client="cibadmin" update-user="root" have-quorum="1" dc-uuid="1">
<configuration>
<crm_config>
<cluster_property_set id="cib-bootstrap-options">
<nvpair id="cib-bootstrap-options-have-watchdog" name="have-watchdog" value="false"/>
<nvpair id="cib-bootstrap-options-dc-version" name="dc-version" value="2.0.3-5.el8_2.1-4b1f869f0f"/>
<nvpair id="cib-bootstrap-options-cluster-infrastructure" name="cluster-infrastructure" value="corosync"/>
<nvpair id="cib-bootstrap-options-cluster-name" name="cluster-name" value="el8-anvil-01"/>
<nvpair id="cib-bootstrap-options-stonith-enabled" name="stonith-enabled" value="true"/>
</cluster_property_set>
</crm_config>
<nodes>
<node id="1" uname="el8-a01n01"/>
<node id="2" uname="el8-a01n02"/>
</nodes>
<resources>
<primitive class="stonith" id="virsh_node1" type="fence_virsh">
<instance_attributes id="virsh_node1-instance_attributes">
<nvpair id="virsh_node1-instance_attributes-delay" name="delay" value="15"/>
<nvpair id="virsh_node1-instance_attributes-ipaddr" name="ipaddr" value="192.168.122.1"/>
<nvpair id="virsh_node1-instance_attributes-login" name="login" value="root"/>
<nvpair id="virsh_node1-instance_attributes-passwd" name="passwd" value="high generous distance"/>
<nvpair id="virsh_node1-instance_attributes-pcmk_host_list" name="pcmk_host_list" value="el8-a01n01"/>
<nvpair id="virsh_node1-instance_attributes-port" name="port" value="el8-a01n01"/>
</instance_attributes>
<operations>
<op id="virsh_node1-monitor-interval-60" interval="60" name="monitor"/>
</operations>
</primitive>
<primitive class="stonith" id="virsh_node2" type="fence_virsh">
<instance_attributes id="virsh_node2-instance_attributes">
<nvpair id="virsh_node2-instance_attributes-ipaddr" name="ipaddr" value="192.168.122.1"/>
<nvpair id="virsh_node2-instance_attributes-login" name="login" value="root"/>
<nvpair id="virsh_node2-instance_attributes-passwd" name="passwd" value="high generous distance"/>
<nvpair id="virsh_node2-instance_attributes-pcmk_host_list" name="pcmk_host_list" value="el8-a01n02"/>
<nvpair id="virsh_node2-instance_attributes-port" name="port" value="el8-a01n02"/>
</instance_attributes>
<operations>
<op id="virsh_node2-monitor-interval-60" interval="60" name="monitor"/>
</operations>
</primitive>
</resources>
<constraints/>
</configuration>
<status>
<node_state id="1" uname="el8-a01n01" in_ccm="true" crmd="online" crm-debug-origin="do_update_resource" join="member" expected="member">
<lrm id="1">
<lrm_resources>
<lrm_resource id="virsh_node1" type="fence_virsh" class="stonith">
<lrm_rsc_op id="virsh_node1_last_0" operation_key="virsh_node1_start_0" operation="start" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="3:4:0:b6d5caa1-2120-49f4-a534-29b724e74161" transition-magic="0:0;3:4:0:b6d5caa1-2120-49f4-a534-29b724e74161" exit-reason="" on_node="el8-a01n01" call-id="6" rc-code="0" op-status="0" interval="0" last-rc-change="1594442946" last-run="1594442946" exec-time="737" queue-time="0" op-digest="608a523c27162c0c4648550326dd1b26" op-secure-params=" password passwd " op-secure-digest="56bdf46bebc74266a3efb03c61e05c7d"/>
<lrm_rsc_op id="virsh_node1_monitor_60000" operation_key="virsh_node1_monitor_60000" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="4:4:0:b6d5caa1-2120-49f4-a534-29b724e74161" transition-magic="0:0;4:4:0:b6d5caa1-2120-49f4-a534-29b724e74161" exit-reason="" on_node="el8-a01n01" call-id="7" rc-code="0" op-status="0" interval="60000" last-rc-change="1594442947" exec-time="614" queue-time="0" op-digest="5be687ff1e141e610106215889894545" op-secure-params=" password passwd " op-secure-digest="56bdf46bebc74266a3efb03c61e05c7d"/>
</lrm_resource>
<lrm_resource id="virsh_node2" type="fence_virsh" class="stonith">
<lrm_rsc_op id="virsh_node2_last_0" operation_key="virsh_node2_monitor_0" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="2:5:7:b6d5caa1-2120-49f4-a534-29b724e74161" transition-magic="0:7;2:5:7:b6d5caa1-2120-49f4-a534-29b724e74161" exit-reason="" on_node="el8-a01n01" call-id="11" rc-code="7" op-status="0" interval="0" last-rc-change="1594443341" last-run="1594443341" exec-time="0" queue-time="0" op-digest="e545a3390de0c9d2624ef4cac775b9c9" op-secure-params=" password passwd " op-secure-digest="8065dc4867c73abfb780e52db7525148"/>
</lrm_resource>
</lrm_resources>
</lrm>
</node_state>
<node_state id="2" uname="el8-a01n02" in_ccm="true" crmd="online" crm-debug-origin="do_update_resource" join="member" expected="member">
<lrm id="2">
<lrm_resources>
<lrm_resource id="virsh_node1" type="fence_virsh" class="stonith">
<lrm_rsc_op id="virsh_node1_last_0" operation_key="virsh_node1_monitor_0" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="2:4:7:b6d5caa1-2120-49f4-a534-29b724e74161" transition-magic="0:7;2:4:7:b6d5caa1-2120-49f4-a534-29b724e74161" exit-reason="" on_node="el8-a01n02" call-id="5" rc-code="7" op-status="0" interval="0" last-rc-change="1594442946" last-run="1594442946" exec-time="3" queue-time="0" op-digest="608a523c27162c0c4648550326dd1b26" op-secure-params=" password passwd " op-secure-digest="56bdf46bebc74266a3efb03c61e05c7d"/>
</lrm_resource>
<lrm_resource id="virsh_node2" type="fence_virsh" class="stonith">
<lrm_rsc_op id="virsh_node2_last_0" operation_key="virsh_node2_start_0" operation="start" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="6:5:0:b6d5caa1-2120-49f4-a534-29b724e74161" transition-magic="0:0;6:5:0:b6d5caa1-2120-49f4-a534-29b724e74161" exit-reason="" on_node="el8-a01n02" call-id="10" rc-code="0" op-status="0" interval="0" last-rc-change="1594443341" last-run="1594443341" exec-time="992" queue-time="0" op-digest="e545a3390de0c9d2624ef4cac775b9c9" op-secure-params=" password passwd " op-secure-digest="8065dc4867c73abfb780e52db7525148"/>
<lrm_rsc_op id="virsh_node2_monitor_60000" operation_key="virsh_node2_monitor_60000" operation="monitor" crm-debug-origin="do_update_resource" crm_feature_set="3.3.0" transition-key="7:5:0:b6d5caa1-2120-49f4-a534-29b724e74161" transition-magic="0:0;7:5:0:b6d5caa1-2120-49f4-a534-29b724e74161" exit-reason="" on_node="el8-a01n02" call-id="11" rc-code="0" op-status="0" interval="60000" last-rc-change="1594443342" exec-time="763" queue-time="1" op-digest="73223e8083cc6447a379293b74bbab9d" op-secure-params=" password passwd " op-secure-digest="8065dc4867c73abfb780e52db7525148"/>
</lrm_resource>
</lrm_resources>
</lrm>
</node_state>
</status>
</cib>
=cut
$problem = 0;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => { problem => $problem }});
foreach my $primitive ($dom->findnodes('/cib/configuration/resources/primitive'))
{
my $class = $primitive->{class};
my $id = $primitive->{id};
my $type = $primitive->{type};
$anvil->data->{cib}{parsed}{cib}{resources}{primitive}{$class}{$id}{type} = $type;
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"cib::parsed::cib::resources::primitive:${class}::${id}::type" => $anvil->data->{cib}{parsed}{cib}{resources}{primitive}{$class}{$id}{type},
}});
foreach my $nvpair ($primitive->findnodes('./instance_attributes/nvpair'))
{
my $name = $nvpair->{name};
foreach my $variable (sort {$a cmp $b} keys %{$nvpair})
{
next if $variable eq "name";
$anvil->data->{cib}{parsed}{cib}{resources}{primitive}{$class}{$id}{instance_attributes}{$name}{$variable} = $nvpair->{$variable};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"cib::parsed::cib::resources::primitive::${class}::${id}::instance_attributes::${name}::${variable}" => $anvil->data->{cib}{parsed}{cib}{resources}{primitive}{$class}{$id}{instance_attributes}{$name}{$variable},
}});
}
}
foreach my $op ($primitive->findnodes('./operations/op'))
{
my $op_id = $op->{id};
foreach my $variable (sort {$a cmp $b} keys %{$op})
{
next if $variable eq "id";
$anvil->data->{cib}{parsed}{cib}{resources}{primitive}{$class}{$id}{operations}{op}{$op_id}{$variable} = $op->{$variable};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"cib::parsed::cib::resources::primitive::${class}::${id}::operations::op::${op_id}::${variable}" => $anvil->data->{cib}{parsed}{cib}{resources}{primitive}{$class}{$id}{operations}{op}{$op_id}{$variable},
}});
}
}
}
foreach my $attribute ($dom->findnodes('/cib'))
{
foreach my $variable (sort {$a cmp $b} keys %{$attribute})
{
$anvil->data->{cib}{parsed}{cib}{$variable} = $attribute->{$variable};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"cib::parsed::cib::${variable}" => $anvil->data->{cib}{parsed}{cib}{$variable},
}});
}
}
foreach my $nvpair ($dom->findnodes('/cib/configuration/crm_config/cluster_property_set/nvpair'))
{
my $name = $nvpair->{name};
$anvil->data->{cib}{parsed}{configuration}{crm_config}{cluster_property_set}{nvpair}{$name}{id} = $nvpair->{id};
$anvil->data->{cib}{parsed}{configuration}{crm_config}{cluster_property_set}{nvpair}{$name}{value} = $nvpair->{value};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"cib::parsed::configuration::crm_config::cluster_property_set::nvpair::${name}::id" => $anvil->data->{cib}{parsed}{configuration}{crm_config}{cluster_property_set}{nvpair}{$name}{id},
"cib::parsed::configuration::crm_config::cluster_property_set::nvpair::${name}::value" => $anvil->data->{cib}{parsed}{configuration}{crm_config}{cluster_property_set}{nvpair}{$name}{value},
}});
}
foreach my $node ($dom->findnodes('/cib/configuration/nodes/node'))
{
my $uname = $node->{uname};
$anvil->data->{cib}{parsed}{configuration}{nodes}{$uname}{id} = $node->{id};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"cib::parsed::configuration::nodes::${uname}::id" => $anvil->data->{cib}{parsed}{configuration}{nodes}{$uname}{id},
}});
}
# Status isn't available until the cluster has been up for a bit.
foreach my $node_state ($dom->findnodes('/cib/status/node_state'))
{
my $uname = $node_state->{uname};
foreach my $variable (sort {$a cmp $b} keys %{$node_state})
{
next if $variable eq "uname";
$anvil->data->{cib}{parsed}{cib}{node_state}{$uname}{$variable} = $node_state->{$variable};
$anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
"cib::parsed::cib::node_state::${uname}::${variable}" => $anvil->data->{cib}{parsed}{cib}{node_state}{$uname}{$variable},
}});
}
}
}
}
#print Dumper $anvil->data->{cib}{parsed};
return($peer_host_name);
return($problem);
}
# =head3

@@ -5,7 +5,7 @@
# 1 == Host UUID not available yet.
#
# TODO:
# * Switch out XML::Simple to XML::Twig or libXML
# * Switch out XML::Simple to XML::Twig or libXML - Started in Cluster->parse_cib();
# - 15:05 < perlbot> XML::Simple commits the fatal flaw of trying to massage complicated and often
# irregular XML into the simple and highly regular world of perl data structures.
# Irregularities cause "not a hashref" sort of errors in your program. Use a real
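
The perlbot note above is the motivation for this switch. A minimal contrast sketch, mirroring the attribute-access style parse_cib() uses (the CIB fragment here is trimmed from the samples above; XML::LibXML's findnodes() always returns a list of element nodes, so a query's result shape never varies the way XML::Simple's collapsed hashes can):

use XML::LibXML;

# A trimmed CIB fragment; in parse_cib() this comes from 'pcs cluster cib'.
my $cib_data = '<cib><configuration><nodes>
  <node id="1" uname="el8-a01n01"/>
  <node id="2" uname="el8-a01n02"/>
</nodes></configuration></cib>';

my $dom = XML::LibXML->load_xml(string => $cib_data);
foreach my $node ($dom->findnodes('/cib/configuration/nodes/node'))
{
	# Element attributes are readable via the hash overload, as in Cluster.pm.
	print $node->{uname}.": id ".$node->{id}."\n";
}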

 20  notes

@@ -255,10 +255,10 @@ systemctl stop libvirtd.service
==== One node
pcs host auth el8-a01n01 el8-a01n02 -u hacluster -p "secret"
pcs cluster setup m3-anvil-01 m3-a01n01 m3-a01n02
pcs cluster setup m3-anvil-01 el8-a01n01 el8-a01n02
pcs cluster start --all
pcs stonith create virsh_node1 fence_virsh pcmk_host_list="m3-a01n01" ipaddr="192.168.122.1" passwd="secret" login="root" delay="15" port="m3-a01n01" op monitor interval="60"
pcs stonith create virsh_node2 fence_virsh pcmk_host_list="m3-a01n02" ipaddr="192.168.122.1" passwd="secret" login="root" port="m3-a01n02" op monitor interval="60"
pcs stonith create virsh_node1 fence_virsh pcmk_host_list="el8-a01n01" ipaddr="192.168.122.1" passwd="secret" login="root" delay="15" port="el8-a01n01" op monitor interval="60"
pcs stonith create virsh_node2 fence_virsh pcmk_host_list="el8-a01n02" ipaddr="192.168.122.1" passwd="secret" login="root" port="el8-a01n02" op monitor interval="60"
pcs property set stonith-enabled=true
pcs resource create hypervisor systemd:libvirtd op monitor interval=60
@@ -271,7 +271,7 @@ pcs resource create test_server ocf:alteeve:server name="test_server" meta allow
pcs resource update test_server ocf:alteeve:server name="test_server" meta allow-migrate="true" op monitor interval="60"
# Test
stonith_admin --fence m3-a01n02 --verbose; crm_error $?
stonith_admin --fence el8-a01n02 --verbose; crm_error $?
==== DRBD notes
@@ -302,16 +302,16 @@ firewall-cmd --reload
* Provision the server via virt-install
* push the new XML to striker such that the peer's anvil daemon picks it up and writes it out.
[root@m3-a01n01 drbd.d]# drbdsetup status r0 --verbose --statistics
[root@el8-a01n01 drbd.d]# drbdsetup status r0 --verbose --statistics
r0 node-id:1 role:Primary suspended:no
write-ordering:flush
volume:0 minor:0 disk:UpToDate quorum:yes
size:10485404 read:9682852 written:0 al-writes:0 bm-writes:0 upper-pending:0 lower-pending:0 al-suspended:no blocked:no
m3-a01n02.alteeve.com node-id:0 connection:Connected role:Secondary congested:no
el8-a01n02.alteeve.com node-id:0 connection:Connected role:Secondary congested:no
volume:0 replication:SyncSource peer-disk:Inconsistent done:92.29 resync-suspended:no
received:0 sent:9679140 out-of-sync:808144 pending:6 unacked:3
[root@m3-a01n02 ~]# cat /sys/kernel/debug/drbd/resources/r0/connections/m3-a01n01.alteeve.com/0/proc_drbd
[root@el8-a01n02 ~]# cat /sys/kernel/debug/drbd/resources/r0/connections/el8-a01n01.alteeve.com/0/proc_drbd
0: cs:SyncSource ro:Primary/Secondary ds:UpToDate/Inconsistent C r-----
ns:24360 nr:10485404 dw:10485404 dr:25420 al:0 bm:0 lo:0 pe:[0;0] ua:0 ap:[0;0] ep:1 wo:2 oos:10461044
[>....................] sync'ed: 0.3% (10212/10236)M
@@ -321,12 +321,12 @@ r0 node-id:1 role:Primary suspended:no
act_log: used:0/1237 hits:0 misses:0 starving:0 locked:0 changed:0
blocked on activity log: 0
[root@m3-a01n02 ~]# drbdadm primary r0
[root@el8-a01n02 ~]# drbdadm primary r0
r0: State change failed: (-1) Multiple primaries not allowed by config
Command 'drbdsetup primary r0' terminated with exit code 11
[root@m3-a01n02 ~]# drbdadm net-options --allow-two-primaries=yes r0
[root@m3-a01n02 ~]# drbdadm net-options --allow-two-primaries=no r0
[root@el8-a01n02 ~]# drbdadm net-options --allow-two-primaries=yes r0
[root@el8-a01n02 ~]# drbdadm net-options --allow-two-primaries=no r0
drbdsetup show all
drbdsetup show all --show-defaults

@@ -64,6 +64,7 @@ Requires: perl-Sys-Syslog
Requires: perl-Text-Diff
Requires: perl-Time-HiRes
Requires: perl-UUID-Tiny
Requires: perl-XML-LibXML
Requires: perl-XML-Simple
Requires: postfix
Requires: postgresql-contrib

@@ -1008,7 +1008,7 @@ NOTE: Please be patient!
<key name="message_0078">Output: [#!variable!line!#].</key>
<key name="message_0079">Error: [#!variable!line!#].</key>
<!-- The strings below are used in the 'default' PXE menu. Entries used as option labels use '^' to indicate the hotkey, and must be unique in a given menu. -->
<key name="message_0080">#!string!brand_0006!# - Install Target Menu</key>
<key name="message_0080">#!string!brand_0002!# - Install Target Menu</key>
<key name="message_0081">Will boot the next device as configured in your BIOS in # second{,s}.</key>
<key name="message_0082"><![CDATA[Press the <tab> key to edit the boot parameters of the highlighted option.]]></key>
<key name="message_0083">Editing of this option is disabled.</key>
@@ -1651,6 +1651,18 @@ Here we will inject 't_0006', which injects 't_0001' which has a variable: [#!st
<key name="warning_0049">[ Warning ] - The passwords do not match.</key>
<key name="warning_0050">[ Warning ] - The host: [#!variable!host!#] now belongs to the #!string!brand_0006!#, it can't be used here anymore.</key>
<key name="warning_0051">[ Warning ] - The IP address: [#!variable!ip!#] is not valid. Ignoring associated hosts: [#!variable!hosts!#].</key>
<key name="warning_0052">[ Warning ] - Failed to read the CIB. Is 'pcsd' running and is the cluster started?</key>
<key name="warning_0053">[ Warning ] - Failed to parse the CIB. The CIB read was:
========
#!variable!cib!#
========
The error was:
========
#!variable!error!#
========
</key>
</language>
<!-- 日本語 -->

@@ -21,8 +21,8 @@ my $anvil = Anvil::Tools->new();
$anvil->Log->level({set => 2});
$anvil->Log->secure({set => 1});
print "Connecting to the database(s);\n";
$anvil->Database->connect();
$anvil->Log->entry({source => $THIS_FILE, line => __LINE__, 'print' => 1, level => 2, secure => 0, key => "log_0132"});
# print "Connecting to the database(s);\n";
# $anvil->Database->connect();
# $anvil->Log->entry({source => $THIS_FILE, line => __LINE__, 'print' => 1, level => 2, secure => 0, key => "log_0132"});
$anvil->System->parse_corosync_conf({debug => 2});
$anvil->Cluster->parse_cib({debug => 2});
