modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceComponent.java |
137 +++++-----
modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml |
107 +++++--
2 files changed, 150 insertions(+), 94 deletions(-)
New commits:
commit 8c5819f9c4cade6c78998159fafda5f892ed8b5a
Author: Jirka Kremser <jkremser(a)redhat.com>
Date: Tue Jul 31 18:52:38 2012 +0200
Metric definitions for server of type JobTracker
diff --git a/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml
b/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml
index cd22c2d..af5b865 100644
--- a/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml
+++ b/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml
@@ -11,68 +11,72 @@
<c:simple-property name="_mainClass" displayName="Main
Class" readOnly="true"
default="org.apache.hadoop.hdfs.server.namenode.NameNode" />
</plugin-configuration>
- <process-scan name="NameNode"
- query="process|basename|match=^java.*,arg|-Dproc_namenode|match=.*"/>
+ <process-scan name="NameNode"
query="process|basename|match=^java.*,arg|-Dproc_namenode|match=.*"/>
- <metric
property="Hadoop:service=NameNode,name=NameNodeInfo|NameDirStatuses"
displayName="NameNode Storage"
+ <metric
property="Hadoop:service=NameNode,name=NameNodeInfo:NameDirStatuses"
displayName="NameNode Storage"
dataType="trait" displayType="summary"/>
- <metric property="Hadoop:service=NameNode,name=NameNodeInfo|Version"
displayName="Version" dataType="trait"
+ <metric property="Hadoop:service=NameNode,name=NameNodeInfo:Version"
displayName="Version" dataType="trait"
displayType="summary"/>
- <metric property="Hadoop:service=NameNode,name=NameNodeInfo|DeadNodes"
displayName="Dead Nodes" dataType="trait"
+ <metric property="Hadoop:service=NameNode,name=NameNodeInfo:DeadNodes"
displayName="Dead Nodes" dataType="trait"
displayType="summary"/>
- <metric property="Hadoop:service=NameNode,name=NameNodeInfo|DecomNodes"
displayName="Decommissioning Nodes"
+ <metric property="Hadoop:service=NameNode,name=NameNodeInfo:DecomNodes"
displayName="Decommissioning Nodes"
dataType="trait" displayType="summary"/>
- <metric property="Hadoop:service=NameNode,name=NameNodeInfo|LiveNodes"
displayName="Live Nodes" dataType="trait"
+ <metric property="Hadoop:service=NameNode,name=NameNodeInfo:LiveNodes"
displayName="Live Nodes" dataType="trait"
displayType="summary"/>
- <metric property="Hadoop:service=NameNode,name=NameNodeInfo|Total"
displayName="Capacity Total" units="bytes"
+ <metric property="Hadoop:service=NameNode,name=NameNodeInfo:Total"
displayName="Capacity Total" units="bytes"
description="DFS Configured capacitiy"/>
- <metric property="Hadoop:service=NameNode,name=NameNodeInfo|Used"
displayName="DFS Used" units="bytes"
+ <metric property="Hadoop:service=NameNode,name=NameNodeInfo:Used"
displayName="DFS Used" units="bytes"
description="DFS used" displayType="summary"/>
- <metric property="Hadoop:service=NameNode,name=NameNodeInfo|PercentUsed"
displayName="DFS Used %" units="percentage"
+ <metric property="Hadoop:service=NameNode,name=NameNodeInfo:PercentUsed"
displayName="DFS Used %"
description="DFS Used %" displayType="summary"/>
- <metric
property="Hadoop:service=NameNode,name=NameNodeInfo|NonDfsUsedSpace"
displayName="Non DFS Used"
+ <metric
property="Hadoop:service=NameNode,name=NameNodeInfo:NonDfsUsedSpace"
displayName="Non DFS Used"
units="bytes" description="Non DFS used"/>
- <metric property="Hadoop:service=NameNode,name=NameNodeInfo|Free"
displayName="DFS Capacity Remaining" units="bytes"
+ <metric property="Hadoop:service=NameNode,name=NameNodeInfo:Free"
displayName="DFS Capacity Remaining" units="bytes"
description="DFS remaining"/>
- <metric
property="Hadoop:service=NameNode,name=NameNodeInfo|PercentRemaining"
displayName="DFS Capacity Remaining %"
- units="percentage" description="DFS remaining"/>
- <metric property="Hadoop:service=NameNode,name=NameNodeInfo|TotalBlocks"
displayName="Blocks Total" units="none"/>
- <metric
property="Hadoop:service=NameNode,name=FSNamesystemState|FilesTotal"
displayName="FilesTotal" units="none"/>
- <metric
property="Hadoop:service=NameNode,name=FSNamesystemState|PendingReplicationBlocks"
displayName="Pending Replication Blocks"
+ <!-- jmx returns number from interval (0,100) not from (0,1)
units="percentage" -->
+ <metric
property="Hadoop:service=NameNode,name=NameNodeInfo:PercentRemaining"
displayName="DFS Capacity Remaining %"
+ description="DFS remaining"/>
+ <metric property="Hadoop:service=NameNode,name=NameNodeInfo:TotalBlocks"
displayName="Blocks Total" units="none"/>
+ <metric
property="Hadoop:service=NameNode,name=FSNamesystemState:FilesTotal"
displayName="FilesTotal" units="none"/>
+ <metric
property="Hadoop:service=NameNode,name=FSNamesystemState:PendingReplicationBlocks"
displayName="Pending Replication Blocks"
units="none"/>
- <metric
property="Hadoop:service=NameNode,name=NameNodeActivity|FilesCreated"
displayName="Files Created"
+ <metric
property="Hadoop:service=NameNode,name=NameNodeActivity:FilesCreated"
displayName="Files Created"
units="none" measurementType="trendsup"/>
<resource-configuration>
- <c:simple-property name="conf/core-site.xml:fs.default.name"
displayName="Namenode URI" />
- <c:simple-property name="conf/hdfs-site.xml:dfs.name.dir"
displayName="Local Namespace and Logs Storage Directory" description="Path
on the local filesystem where the NameNode stores the namespace and transactions logs
persistently."/>
+ <c:simple-property name="conf/core-site.xml:fs.default.name"
displayName="Namenode URI"/>
+ <c:simple-property name="conf/hdfs-site.xml:dfs.name.dir"
displayName="Local Namespace and Logs Storage Directory"
+ description="Path on the local filesystem where the NameNode stores the
namespace and transactions logs persistently."/>
</resource-configuration>
</server>
<server name="SecondaryNameNode"
discovery="HadoopServiceDiscovery" class="HadoopServiceComponent">
<plugin-configuration>
- <c:simple-property name="_mainClass" displayName="Main
Class" readOnly="true"
default="org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode" />
+ <c:simple-property name="_mainClass" displayName="Main
Class" readOnly="true"
+
default="org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode"/>
</plugin-configuration>
- <process-scan name="SecondaryNameNode"
query="process|basename|match=^java.*,arg|*|match=.*proc_secondarynamenode.*"/>
+ <process-scan name="SecondaryNameNode"
query="process|basename|match=^java.*,arg|-Dproc_secondarynamenode|match=.*"/>
</server>
<!-- DataNode (
http://wiki.apache.org/hadoop/DataNode) -->
<server name="DataNode" discovery="HadoopServiceDiscovery"
class="HadoopServiceComponent">
<plugin-configuration>
- <c:simple-property name="_mainClass" displayName="Main
Class" readOnly="true"
default="org.apache.hadoop.hdfs.server.datanode.DataNode" />
+ <c:simple-property name="_mainClass" displayName="Main
Class" readOnly="true"
+ default="org.apache.hadoop.hdfs.server.datanode.DataNode"/>
</plugin-configuration>
<process-scan name="DataNode"
query="process|basename|match=^java.*,arg|*|match=.*proc_datanode.*"/>
- <metric property="Hadoop:service=DataNode,name=DataNode*|bytes_written"
displayName="Bytes Writter"
+ <metric property="Hadoop:service=DataNode,name=DataNode*:bytes_written"
displayName="Bytes Written"
measurementType="trendsup"/>
- <metric
property="Hadoop:service=DataNode,name=FSDatasetState*|Remaining"
displayName="Remaining" units="bytes"/>
- <metric property="Hadoop:service=DataNode,name=FSDatasetState*|Capacity"
displayName="Capacity" units="bytes"/>
- <metric
property="Hadoop:service=DataNode,name=FSDatasetState*|StorageInfo"
dataType="trait" displayType="summary"/>
- <metric
property="Hadoop:service=DataNode,name=RpcActivitForPort*|NumOpenConnections"
displayName="Number of Open Connections"/>
-
+ <metric
property="Hadoop:service=DataNode,name=FSDatasetState*:Remaining"
displayName="Remaining" units="bytes"/>
+ <metric property="Hadoop:service=DataNode,name=FSDatasetState*:Capacity"
displayName="Capacity" units="bytes"/>
+ <metric
property="Hadoop:service=DataNode,name=FSDatasetState*:StorageInfo"
dataType="trait" displayType="summary"/>
+ <metric
property="Hadoop:service=DataNode,name=RpcActivityForPort*:NumOpenConnections"
displayName="Number of Open Connections"/>
+
<resource-configuration>
- <c:simple-property name="conf/hdfs-site.xml:dfs.data.dir"
displayName="Storage Directory" description="Comma separated list of paths
on the local filesystem of a DataNode where it should store its blocks."/>
+ <c:simple-property name="conf/hdfs-site.xml:dfs.data.dir"
displayName="Storage Directory"
+ description="Comma separated list of paths on the local filesystem of a
DataNode where it should store its blocks."/>
</resource-configuration>
</server>
@@ -80,17 +84,52 @@
<server name="JobTracker" discovery="HadoopServiceDiscovery"
class="HadoopServiceComponent">
<plugin-configuration>
<c:simple-property name="baseObjectName"
defaultValue="hadoop:service=JobTracker"/>
- <c:simple-property name="_mainClass" displayName="Main
Class" readOnly="true"
default="org.apache.hadoop.mapred.JobTracker" />
+ <c:simple-property name="_mainClass" displayName="Main
Class" readOnly="true"
+ default="org.apache.hadoop.mapred.JobTracker"/>
</plugin-configuration>
- <process-scan name="JobTracker"
query="process|basename|match=^java.*,arg|*|match=.*proc_jobtracker.*"/>
+
+ <process-scan name="JobTracker"
query="process|basename|match=^java.*,arg|-Dproc_jobtracker|match=.*"/>
+
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:jobs_completed"
displayName="Jobs Completed"
+ displayType="summary"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:jobs_running"
displayName="Jobs Running"
+ displayType="summary"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:jobs_preparing"
displayName="Jobs Preparing"
+ displayType="summary"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:jobs_killed"
displayName="Jobs Killed"
+ displayType="summary"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:jobs_failed"
displayName="Jobs Failed"
+ displayType="summary"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:running_maps"
displayName="Running Map Tasks"
+ displayType="summary"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:running_reduces"
displayName="Running Reduce Tasks"
+ displayType="summary"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:jobs_submitted"
displayName="Total Submissions"
+ displayType="summary"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:trackers"
displayName="Nodes" displayType="summary"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:occupied_map_slots"
displayName="Occupied Map Slots"
+ displayType="summary"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:occupied_reduce_slots"
displayName="Occupied Reduce Slots"
+ displayType="summary"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:reserved_map_slots"
displayName="Reserved Map Slots"
+ displayType="summary"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:reserved_reduce_slots"
displayName="Reserved Reduce Slots"
+ displayType="summary"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:map_slots"
displayName="Map Task Capacity"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:reduce_slots"
displayName="Reduce Task Capacity"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:trackers_blacklisted"
displayName="Blacklisted Nodes"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:trackers_graylisted"
displayName="Graylisted Nodes"/>
+ <metric
property="Hadoop:service=JobTracker,name=JobTrackerMetrics:trackers_decommissioned"
displayName="Excluded Nodes"/>
+
</server>
<!-- TaskTracker (
http://wiki.apache.org/hadoop/TaskTracker) -->
<server name="TaskTracker" discovery="HadoopServiceDiscovery"
class="HadoopServiceComponent">
<plugin-configuration>
- <c:simple-property name="_mainClass" displayName="Main
Class" readOnly="true"
default="org.apache.hadoop.mapred.TaskTracker" />
+ <c:simple-property name="_mainClass" displayName="Main
Class" readOnly="true"
+ default="org.apache.hadoop.mapred.TaskTracker"/>
</plugin-configuration>
- <process-scan name="TaskTracker"
query="process|basename|match=^java.*,arg|*|match=.*proc_tasktracker.*"/>
+ <process-scan name="TaskTracker"
query="process|basename|match=^java.*,arg|-Dproc_tasktracker|match=.*"/>
</server>
</plugin>
\ No newline at end of file
commit 0e51f3179a68b70cbaa73d25063d823ea01e1cbd
Author: Jirka Kremser <jkremser(a)redhat.com>
Date: Tue Jul 31 18:37:05 2012 +0200
Metrics subsystem is now collecting the data via JMX
diff --git
a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceComponent.java
b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceComponent.java
index e204355..02f7a22 100644
---
a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceComponent.java
+++
b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceComponent.java
@@ -44,6 +44,7 @@ import org.rhq.core.domain.configuration.definition.PropertyDefinition;
import org.rhq.core.domain.configuration.definition.PropertyDefinitionSimple;
import org.rhq.core.domain.measurement.AvailabilityType;
import org.rhq.core.domain.measurement.MeasurementDataNumeric;
+import org.rhq.core.domain.measurement.MeasurementDataTrait;
import org.rhq.core.domain.measurement.MeasurementReport;
import org.rhq.core.domain.measurement.MeasurementScheduleRequest;
import org.rhq.core.pluginapi.configuration.ConfigurationFacet;
@@ -57,22 +58,22 @@ import org.rhq.plugins.jmx.JMXServerComponent;
public class HadoopServiceComponent extends
JMXServerComponent<ResourceComponent<?>> implements
JMXComponent<ResourceComponent<?>>, MeasurementFacet, OperationFacet,
ConfigurationFacet {
-
+
private static final Log LOG = LogFactory.getLog(HadoopServiceComponent.class);
private static final XMLInputFactory XML_INPUT_FACTORY =
XMLInputFactory.newInstance();
-
+
private static final String PROPERTY_TAG_NAME = "property";
private static final String NAME_TAG_NAME = "name";
private static final String VALUE_TAG_NAME = "value";
-
+
/**
* Return availability of this resource
* @see org.rhq.core.pluginapi.inventory.ResourceComponent#getAvailability()
*/
@Override
public AvailabilityType getAvailability() {
- return getResourceContext().getNativeProcess().isRunning() ? AvailabilityType.UP:
AvailabilityType.DOWN;
+ return getResourceContext().getNativeProcess().isRunning() ? AvailabilityType.UP
: AvailabilityType.DOWN;
}
@Override
@@ -90,55 +91,68 @@ public class HadoopServiceComponent extends
JMXServerComponent<ResourceComponent
* @see
org.rhq.core.pluginapi.measurement.MeasurementFacet#getValues(org.rhq.core.domain.measurement.MeasurementReport,
java.util.Set)
*/
public void getValues(MeasurementReport report, Set<MeasurementScheduleRequest>
metrics) throws Exception {
-
- for (MeasurementScheduleRequest req : metrics) {
- String property = req.getName();
- String props[] = property.split("\\|");
-
- EmsConnection conn = getEmsConnection();
- EmsBean bean = conn.getBean(props[0]);
- if (bean != null) {
- bean.refreshAttributes();
- EmsAttribute att = bean.getAttribute(props[1]);
- if (att != null) {
- Long val = (Long) att.getValue(); // TODO check for real type
-
- MeasurementDataNumeric res = new MeasurementDataNumeric(req,
Double.valueOf(val));
- report.addData(res);
- } else
- LOG.warn("Attribute " + props[1] + " not
found");
- } else
- LOG.warn("MBean " + props[0] + " not found");
+ for (MeasurementScheduleRequest request : metrics) {
+ String name = request.getName();
+ int delimIndex = name.lastIndexOf(':');
+ String beanName = name.substring(0, delimIndex);
+ String attributeName = name.substring(delimIndex + 1);
+ try {
+ EmsConnection emsConnection = getEmsConnection();
+ EmsBean bean = emsConnection.getBean(beanName);
+ if (bean != null) {
+ bean.refreshAttributes();
+ EmsAttribute attribute = bean.getAttribute(attributeName);
+ if (attribute != null) {
+ Object valueObject = attribute.refresh();
+ if (valueObject instanceof Number) {
+ Number value = (Number) valueObject;
+ report.addData(new MeasurementDataNumeric(request,
value.doubleValue()));
+ } else {
+ report.addData(new MeasurementDataTrait(request,
valueObject.toString()));
+ }
+ } else {
+ LOG.warn("Attribute " + attributeName + " not
found");
+ }
+ } else {
+ LOG.warn("MBean " + beanName + " not found");
+ }
+ } catch (Exception e) {
+ LOG.error("Failed to obtain measurement [" + name +
"]", e);
+ }
}
}
public Configuration loadResourceConfiguration() throws Exception {
- ConfigurationDefinition definition =
getResourceContext().getResourceType().getResourceConfigurationDefinition();
+ ConfigurationDefinition definition = getResourceContext().getResourceType()
+ .getResourceConfigurationDefinition();
Configuration config = new Configuration();
-
- File homeDir = new
File(getResourceContext().getPluginConfiguration().getSimpleValue(HadoopServiceDiscovery.HOME_DIR_PROPERTY));
-
+
+ File homeDir = new
File(getResourceContext().getPluginConfiguration().getSimpleValue(
+ HadoopServiceDiscovery.HOME_DIR_PROPERTY));
+
if (!homeDir.exists()) {
- throw new IllegalArgumentException("The configured home directory of
this Hadoop instance (" + homeDir.getAbsolutePath() + ") no longer
exists.");
+ throw new IllegalArgumentException("The configured home directory of
this Hadoop instance ("
+ + homeDir.getAbsolutePath() + ") no longer exists.");
}
-
+
if (!homeDir.isDirectory()) {
- throw new IllegalArgumentException("The configured home directory of
this Hadoop instance (" + homeDir.getAbsolutePath() + ") is not a
directory.");
+ throw new IllegalArgumentException("The configured home directory of
this Hadoop instance ("
+ + homeDir.getAbsolutePath() + ") is not a directory.");
}
-
+
if (!homeDir.canRead()) {
- throw new IllegalArgumentException("The configured home directory of
this Hadoop instance (" + homeDir.getAbsolutePath() + ") is not
readable.");
+ throw new IllegalArgumentException("The configured home directory of
this Hadoop instance ("
+ + homeDir.getAbsolutePath() + ") is not readable.");
}
-
+
fillResourceConfiguration(homeDir, config, definition);
-
+
return config;
}
-
public void updateResourceConfiguration(ConfigurationUpdateReport report) {
// TODO Auto-generated method stub
-
+
}
/**
@@ -157,59 +171,62 @@ public class HadoopServiceComponent extends
JMXServerComponent<ResourceComponent
}
return res;
}
-
- public static void fillResourceConfiguration(File homeDir, Configuration config,
ConfigurationDefinition definition) throws XMLStreamException, IOException {
+
+ public static void fillResourceConfiguration(File homeDir, Configuration config,
ConfigurationDefinition definition)
+ throws XMLStreamException, IOException {
//the config is just a bunch of simples, so this is rather easy.. no cumbersome
traversal of property maps and lists
-
+
Map<String, PropertySimple> propertiesToFind = new HashMap<String,
PropertySimple>();
Set<File> configFilesToParse = new HashSet<File>();
-
- for(PropertyDefinition pd : definition.getPropertyDefinitions().values()) {
+
+ for (PropertyDefinition pd : definition.getPropertyDefinitions().values()) {
if (!(pd instanceof PropertyDefinitionSimple)) {
//hmm... well, someone thought it's enough to change the config and
the code would be clever.
//it's not ;)
continue;
}
-
+
String propertyName = pd.getName();
String[] parts = propertyName.split(":");
String fileName = parts[0];
String configName = parts[1];
-
+
File configFile = new File(homeDir, fileName);
-
+
if (!configFile.exists()) {
- throw new IllegalArgumentException("The expected configuration file
(" + configFile.getAbsolutePath() + ") doesn't exist.");
+ throw new IllegalArgumentException("The expected configuration file
(" + configFile.getAbsolutePath()
+ + ") doesn't exist.");
}
-
+
configFilesToParse.add(configFile);
-
+
PropertySimple prop = new PropertySimple();
prop.setName(propertyName);
config.put(prop);
-
+
propertiesToFind.put(configName, prop);
}
-
- for(File configFile : configFilesToParse) {
+
+ for (File configFile : configFilesToParse) {
parseAndAssignProps(configFile, propertiesToFind);
}
}
-
- private static void parseAndAssignProps(File configFile, Map<String,
PropertySimple> props) throws XMLStreamException, IOException {
+
+ private static void parseAndAssignProps(File configFile, Map<String,
PropertySimple> props)
+ throws XMLStreamException, IOException {
FileInputStream in = new FileInputStream(configFile);
XMLStreamReader rdr = XML_INPUT_FACTORY.createXMLStreamReader(in);
try {
boolean inProperty = false;
String propertyName = null;
String propertyValue = null;
-
- while(rdr.hasNext()) {
+
+ while (rdr.hasNext()) {
int event = rdr.next();
-
+
String tag = null;
-
- switch(event) {
+
+ switch (event) {
case XMLStreamReader.START_ELEMENT:
tag = rdr.getName().getLocalPart();
if (PROPERTY_TAG_NAME.equals(tag)) {
@@ -224,12 +241,12 @@ public class HadoopServiceComponent extends
JMXServerComponent<ResourceComponent
tag = rdr.getName().getLocalPart();
if (PROPERTY_TAG_NAME.equals(tag)) {
inProperty = false;
-
+
PropertySimple prop = props.get(propertyName);
if (prop != null) {
prop.setValue(propertyValue);
}
-
+
propertyName = null;
propertyValue = null;
}
@@ -241,5 +258,5 @@ public class HadoopServiceComponent extends
JMXServerComponent<ResourceComponent
in.close();
}
}
-
+
}