modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerComponent.java |
34 +++++++---
modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml |
18 ++++-
2 files changed, 41 insertions(+), 11 deletions(-)
New commits:
commit 067a73451b1b37b9c77191847749fb7afd5b88d7
Author: Lukas Krejci <lkrejci@redhat.com>
Date: Thu Aug 2 13:56:23 2012 +0200
* added events support to the rest of the hadoop resource types
* renamed "DataNode" to "Hadoop DataNode"
* Tied event polling registration to the avail check because we are only able to find
out the name of the log if the hadoop server is running.
diff --git
a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerComponent.java
b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerComponent.java
index f6911b9..cb36798 100644
---
a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerComponent.java
+++
b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerComponent.java
@@ -45,6 +45,7 @@ import org.rhq.core.pluginapi.inventory.ResourceContext;
import org.rhq.core.pluginapi.measurement.MeasurementFacet;
import org.rhq.core.pluginapi.operation.OperationFacet;
import org.rhq.core.pluginapi.operation.OperationResult;
+import org.rhq.core.system.ProcessInfo;
import org.rhq.plugins.jmx.JMXComponent;
import org.rhq.plugins.jmx.JMXServerComponent;
@@ -60,25 +61,21 @@ public class HadoopServerComponent extends
JMXServerComponent<ResourceComponent<
private HadoopOperationsDelegate operationsDelegate;
+ private boolean eventsRegistered;
+
@Override
@SuppressWarnings({ "rawtypes", "unchecked" })
public void start(ResourceContext context) throws Exception {
super.start(context);
configurationDelegate = new HadoopServerConfigurationDelegate(context);
this.operationsDelegate = new HadoopOperationsDelegate(context);
-
- EventContext events = context.getEventContext();
- if (events != null) {
- File logFile = determineLogFile();
- int interval =
Integer.parseInt(context.getPluginConfiguration().getSimpleValue(LOG_POLLING_INTERVAL_PROPERTY,
"60"));
- events.registerEventPoller(new LogFileEventPoller(events, LOG_EVENT_TYPE,
logFile, new Log4JLogEntryProcessor(LOG_EVENT_TYPE, logFile)), interval);
- }
}
@Override
public void stop() {
EventContext events = getResourceContext().getEventContext();
- if (events != null) {
+ if (events != null && eventsRegistered) {
+ eventsRegistered = false;
events.unregisterEventPoller(LOG_EVENT_TYPE);
}
super.stop();
@@ -90,7 +87,26 @@ public class HadoopServerComponent extends
JMXServerComponent<ResourceComponent<
*/
@Override
public AvailabilityType getAvailability() {
- return getResourceContext().getNativeProcess().isRunning() ? AvailabilityType.UP
: AvailabilityType.DOWN;
+ ProcessInfo process = getResourceContext().getNativeProcess();
+
+ AvailabilityType ret = process == null ? AvailabilityType.DOWN :
(process.isRunning() ? AvailabilityType.UP : AvailabilityType.DOWN);
+
+ EventContext events = getResourceContext().getEventContext();
+ if (events != null) {
+ if (ret == AvailabilityType.UP) {
+ if (!eventsRegistered) {
+ File logFile = determineLogFile();
+ int interval =
Integer.parseInt(getResourceContext().getPluginConfiguration().getSimpleValue(LOG_POLLING_INTERVAL_PROPERTY,
"60"));
+ events.registerEventPoller(new LogFileEventPoller(events,
LOG_EVENT_TYPE, logFile, new Log4JLogEntryProcessor(LOG_EVENT_TYPE, logFile)), interval);
+ eventsRegistered = true;
+ }
+ } else if (eventsRegistered) {
+ eventsRegistered = false;
+ events.unregisterEventPoller(LOG_EVENT_TYPE);
+ }
+ }
+
+ return ret;
}
@Override
diff --git a/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml
b/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml
index ef420e6..7e75fa3 100644
--- a/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml
+++ b/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml
@@ -7,7 +7,7 @@
<!-- NameNode (
http://wiki.apache.org/hadoop/NameNode) -->
<server name="Hadoop NameNode" discovery="HadoopServerDiscovery"
class="HadoopServerComponent">
<plugin-configuration>
- <c:simple-property name="hadoop.home.dir" displayName="Home
Directory"/>
+ <c:simple-property name="hadoop.home.dir" displayName="Home
Directory" default="*** SHOULD HAVE BEEN AUTODETECTED ***"/>
<c:simple-property name="_mainClass" displayName="Main
Class" readOnly="true"
default="org.apache.hadoop.hdfs.server.namenode.NameNode"/>
<c:simple-property name="logPollingInterval" default="60"
@@ -85,6 +85,8 @@
<plugin-configuration>
<c:simple-property name="_mainClass" displayName="Main
Class" readOnly="true"
default="org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode"/>
+ <c:simple-property name="hadoop.home.dir" displayName="Home
Directory" default="*** SHOULD HAVE BEEN AUTODETECTED ***"/>
+ <c:simple-property name="logPollingInterval" default="60"
description="The interval for log file polling in seconds."/>
</plugin-configuration>
<process-scan name="SecondaryNameNode"
query="process|basename|match=^java.*,arg|-Dproc_secondarynamenode|match=.*"/>
@@ -99,13 +101,16 @@
<c:simple-property name="operationResult" description="Outcome
of stopping the SecondaryNameNode"/>
</results>
</operation>
+
+ <event name="logEntry" description="an entry in a log
file"/>
</server>
<!-- DataNode (
http://wiki.apache.org/hadoop/DataNode) -->
- <server name="DataNode" discovery="HadoopServerDiscovery"
class="HadoopServerComponent">
+ <server name="Hadoop DataNode" discovery="HadoopServerDiscovery"
class="HadoopServerComponent">
<plugin-configuration>
<c:simple-property name="_mainClass" displayName="Main
Class" readOnly="true"
default="org.apache.hadoop.hdfs.server.datanode.DataNode"/>
+ <c:simple-property name="logPollingInterval" default="60"
description="The interval for log file polling in seconds."/>
</plugin-configuration>
<process-scan name="DataNode"
query="process|basename|match=^java.*,arg|*|match=.*proc_datanode.*"/>
@@ -128,6 +133,8 @@
<metric
property="Hadoop:service=DataNode,name=FSDatasetState*:StorageInfo"
dataType="trait" displayType="summary"/>
<metric
property="Hadoop:service=DataNode,name=RpcActivitForPort*:NumOpenConnections"
displayName="Number of Open Connections"/>
+ <event name="logEntry" description="an entry in a log
file"/>
+
<resource-configuration>
<c:simple-property name="conf/hdfs-site.xml:dfs.data.dir"
displayName="Storage Directory"
description="Comma separated list of paths on the local filesystem of a
DataNode where it should store its blocks." required="false"/>
@@ -137,9 +144,11 @@
<!-- JobTracker (
http://wiki.apache.org/hadoop/JobTracker) -->
<server name="Hadoop JobTracker"
discovery="HadoopServerDiscovery" class="HadoopServerComponent">
<plugin-configuration>
+ <c:simple-property name="hadoop.home.dir" displayName="Home
Directory" default="*** SHOULD HAVE BEEN AUTODETECTED ***"/>
<c:simple-property name="baseObjectName"
defaultValue="hadoop:service=JobTracker"/>
<c:simple-property name="_mainClass" displayName="Main
Class" readOnly="true"
default="org.apache.hadoop.mapred.JobTracker"/>
+ <c:simple-property name="logPollingInterval" default="60"
description="The interval for log file polling in seconds."/>
</plugin-configuration>
<process-scan name="JobTracker"
query="process|basename|match=^java.*,arg|-Dproc_jobtracker|match=.*"/>
@@ -191,6 +200,8 @@
<metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:trackers_graylisted"
displayName="Graylisted Nodes"/>
<metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:trackers_decommissioned"
displayName="Excluded Nodes"/>
+ <event name="logEntry" description="an entry in a log
file"/>
+
<resource-configuration>
<c:simple-property name="conf/mapred-site.xml:mapred.job.tracker"
displayName="Host And Port" description="Host or IP and port of JobTracker.
host:port pair." required="false"/>
<c:simple-property name="conf/mapred-site.xml:mapred.system.dir"
displayName="System Files Location" description="Path on the HDFS where
where the MapReduce framework stores system files e.g. /hadoop/mapred/system/. This is in
the default filesystem (HDFS) and must be accessible from both the server and client
machines." required="false"/>
@@ -204,8 +215,10 @@
<!-- TaskTracker (
http://wiki.apache.org/hadoop/TaskTracker) -->
<server name="Hadoop TaskTracker"
discovery="HadoopServerDiscovery" class="HadoopServerComponent">
<plugin-configuration>
+ <c:simple-property name="hadoop.home.dir" displayName="Home
Directory" default="*** SHOULD HAVE BEEN AUTODETECTED ***"/>
<c:simple-property name="_mainClass" displayName="Main
Class" readOnly="true"
default="org.apache.hadoop.mapred.TaskTracker"/>
+ <c:simple-property name="logPollingInterval" default="60"
description="The interval for log file polling in seconds."/>
</plugin-configuration>
<process-scan name="TaskTracker"
query="process|basename|match=^java.*,arg|-Dproc_tasktracker|match=.*"/>
@@ -233,5 +246,6 @@
<metric
property="Hadoop:service=TaskTracker,name=TaskTrackerInfo:JobTrackerUrl"
displayName="Tasks Completed"
dataType="trait"/>
+ <event name="logEntry" description="an entry in a log
file"/>
</server>
</plugin>