 modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceComponent.java | 141 +++++++++-
 modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceDiscovery.java |  29 +-
 modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml                       |   2
3 files changed, 166 insertions(+), 6 deletions(-)
New commits:
commit d81f463b877b57a7e561159bbef7f0854b53dd81
Author: Lukas Krejci <lkrejci(a)redhat.com>
Date: Tue Jul 31 17:18:24 2012 +0200
Reading of hadoop config files should work now.
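    For context (not part of the patch): Hadoop's *-site.xml files under the home
    directory follow the <configuration>/<property>/<name>/<value> layout, and the
    resource-config property names in the plugin descriptor are expected to encode
    "relativeFilePath:propertyName" (that is what the split on ':' in
    fillResourceConfiguration below relies on). A rough standalone sketch of the same
    StAX parsing approach; the file path and element names here are illustrative
    assumptions, not code from the commit:

        import java.io.FileInputStream;
        import javax.xml.stream.XMLInputFactory;
        import javax.xml.stream.XMLStreamConstants;
        import javax.xml.stream.XMLStreamReader;

        public class DumpHadoopConfig {
            public static void main(String[] args) throws Exception {
                // hypothetical location; point this at a real *-site.xml
                FileInputStream in = new FileInputStream("conf/core-site.xml");
                XMLStreamReader rdr = XMLInputFactory.newInstance().createXMLStreamReader(in);
                try {
                    boolean inProperty = false;
                    String name = null;
                    while (rdr.hasNext()) {
                        int event = rdr.next();
                        if (event == XMLStreamConstants.START_ELEMENT) {
                            String tag = rdr.getName().getLocalPart();
                            if ("property".equals(tag)) {
                                inProperty = true;
                            } else if (inProperty && "name".equals(tag)) {
                                name = rdr.getElementText();
                            } else if (inProperty && "value".equals(tag)) {
                                // assumes <name> precedes <value>, as it does in Hadoop's files
                                System.out.println(name + " = " + rdr.getElementText());
                            }
                        } else if (event == XMLStreamConstants.END_ELEMENT
                            && "property".equals(rdr.getName().getLocalPart())) {
                            inProperty = false;
                        }
                    }
                } finally {
                    rdr.close();
                    in.close();
                }
            }
        }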
diff --git a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceComponent.java b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceComponent.java
index 5cb3806..e204355 100644
--- a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceComponent.java
+++ b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceComponent.java
@@ -19,8 +19,18 @@
package org.rhq.plugins.hadoop;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
import java.util.Set;
+import javax.xml.stream.XMLInputFactory;
+import javax.xml.stream.XMLStreamException;
+import javax.xml.stream.XMLStreamReader;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.mc4j.ems.connection.EmsConnection;
@@ -28,10 +38,16 @@ import org.mc4j.ems.connection.bean.EmsBean;
import org.mc4j.ems.connection.bean.attribute.EmsAttribute;
import org.rhq.core.domain.configuration.Configuration;
+import org.rhq.core.domain.configuration.PropertySimple;
+import org.rhq.core.domain.configuration.definition.ConfigurationDefinition;
+import org.rhq.core.domain.configuration.definition.PropertyDefinition;
+import org.rhq.core.domain.configuration.definition.PropertyDefinitionSimple;
import org.rhq.core.domain.measurement.AvailabilityType;
import org.rhq.core.domain.measurement.MeasurementDataNumeric;
import org.rhq.core.domain.measurement.MeasurementReport;
import org.rhq.core.domain.measurement.MeasurementScheduleRequest;
+import org.rhq.core.pluginapi.configuration.ConfigurationFacet;
+import org.rhq.core.pluginapi.configuration.ConfigurationUpdateReport;
import org.rhq.core.pluginapi.inventory.ResourceComponent;
import org.rhq.core.pluginapi.measurement.MeasurementFacet;
import org.rhq.core.pluginapi.operation.OperationFacet;
@@ -40,10 +56,16 @@ import org.rhq.plugins.jmx.JMXComponent;
import org.rhq.plugins.jmx.JMXServerComponent;
 public class HadoopServiceComponent extends JMXServerComponent<ResourceComponent<?>> implements
-    JMXComponent<ResourceComponent<?>>, MeasurementFacet, OperationFacet {
+    JMXComponent<ResourceComponent<?>>, MeasurementFacet, OperationFacet, ConfigurationFacet {
private static final Log LOG = LogFactory.getLog(HadoopServiceComponent.class);
+    private static final XMLInputFactory XML_INPUT_FACTORY = XMLInputFactory.newInstance();
+
+ private static final String PROPERTY_TAG_NAME = "property";
+ private static final String NAME_TAG_NAME = "name";
+ private static final String VALUE_TAG_NAME = "value";
+
/**
* Return availability of this resource
* @see org.rhq.core.pluginapi.inventory.ResourceComponent#getAvailability()
@@ -55,7 +77,7 @@ public class HadoopServiceComponent extends JMXServerComponent<ResourceComponent
@Override
public EmsConnection getEmsConnection() {
-        EmsConnection conn = super.getEmsConnection(); // TODO: Customise this generated block
+        EmsConnection conn = super.getEmsConnection();
if (LOG.isTraceEnabled()) {
LOG.trace("EmsConnection is " + conn.toString());
}
@@ -90,6 +112,35 @@ public class HadoopServiceComponent extends JMXServerComponent<ResourceComponent
}
}
+    public Configuration loadResourceConfiguration() throws Exception {
+        ConfigurationDefinition definition = getResourceContext().getResourceType().getResourceConfigurationDefinition();
+        Configuration config = new Configuration();
+
+        File homeDir = new File(getResourceContext().getPluginConfiguration().getSimpleValue(HadoopServiceDiscovery.HOME_DIR_PROPERTY));
+
+        if (!homeDir.exists()) {
+            throw new IllegalArgumentException("The configured home directory of this Hadoop instance (" + homeDir.getAbsolutePath() + ") no longer exists.");
+        }
+
+        if (!homeDir.isDirectory()) {
+            throw new IllegalArgumentException("The configured home directory of this Hadoop instance (" + homeDir.getAbsolutePath() + ") is not a directory.");
+        }
+
+        if (!homeDir.canRead()) {
+            throw new IllegalArgumentException("The configured home directory of this Hadoop instance (" + homeDir.getAbsolutePath() + ") is not readable.");
+        }
+
+        fillResourceConfiguration(homeDir, config, definition);
+
+        return config;
+    }
+
+
+    public void updateResourceConfiguration(ConfigurationUpdateReport report) {
+        // TODO Auto-generated method stub
+
+    }
+
/**
* Invokes the passed operation on the managed resource
* @param name Name of the operation
@@ -106,5 +157,89 @@ public class HadoopServiceComponent extends JMXServerComponent<ResourceComponent
}
return res;
}
-
+
+    public static void fillResourceConfiguration(File homeDir, Configuration config, ConfigurationDefinition definition) throws XMLStreamException, IOException {
+        //the config is just a bunch of simples, so this is rather easy.. no cumbersome traversal of property maps and lists
+
+        Map<String, PropertySimple> propertiesToFind = new HashMap<String, PropertySimple>();
+        Set<File> configFilesToParse = new HashSet<File>();
+
+        for(PropertyDefinition pd : definition.getPropertyDefinitions().values()) {
+            if (!(pd instanceof PropertyDefinitionSimple)) {
+                //hmm... well, someone thought it's enough to change the config and the code would be clever.
+                //it's not ;)
+                continue;
+            }
+
+            String propertyName = pd.getName();
+            String[] parts = propertyName.split(":");
+            String fileName = parts[0];
+            String configName = parts[1];
+
+            File configFile = new File(homeDir, fileName);
+
+            if (!configFile.exists()) {
+                throw new IllegalArgumentException("The expected configuration file (" + configFile.getAbsolutePath() + ") doesn't exist.");
+            }
+
+            configFilesToParse.add(configFile);
+
+            PropertySimple prop = new PropertySimple();
+            prop.setName(propertyName);
+            config.put(prop);
+
+            propertiesToFind.put(configName, prop);
+        }
+
+        for(File configFile : configFilesToParse) {
+            parseAndAssignProps(configFile, propertiesToFind);
+        }
+    }
+
+    private static void parseAndAssignProps(File configFile, Map<String, PropertySimple> props) throws XMLStreamException, IOException {
+        FileInputStream in = new FileInputStream(configFile);
+        XMLStreamReader rdr = XML_INPUT_FACTORY.createXMLStreamReader(in);
+        try {
+            boolean inProperty = false;
+            String propertyName = null;
+            String propertyValue = null;
+
+            while(rdr.hasNext()) {
+                int event = rdr.next();
+
+                String tag = null;
+
+                switch(event) {
+                case XMLStreamReader.START_ELEMENT:
+                    tag = rdr.getName().getLocalPart();
+                    if (PROPERTY_TAG_NAME.equals(tag)) {
+                        inProperty = true;
+                    } else if (inProperty && NAME_TAG_NAME.equals(tag)) {
+                        propertyName = rdr.getElementText();
+                    } else if (inProperty && VALUE_TAG_NAME.equals(tag)) {
+                        propertyValue = rdr.getElementText();
+                    }
+                    break;
+                case XMLStreamReader.END_ELEMENT:
+                    tag = rdr.getName().getLocalPart();
+                    if (PROPERTY_TAG_NAME.equals(tag)) {
+                        inProperty = false;
+
+                        PropertySimple prop = props.get(propertyName);
+                        if (prop != null) {
+                            prop.setValue(propertyValue);
+                        }
+
+                        propertyName = null;
+                        propertyValue = null;
+                    }
+                    break;
+                }
+            }
+        } finally {
+            rdr.close();
+            in.close();
+        }
+    }
+
}
diff --git a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceDiscovery.java b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceDiscovery.java
index 7b75931..e292168 100644
--- a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceDiscovery.java
+++ b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceDiscovery.java
@@ -52,7 +52,7 @@ public class HadoopServiceDiscovery implements ResourceDiscoveryComponent<Resour
     private static final String HADOOP_VERSION_MATCH = "hadoop-core-([0-9\\.]+)\\.jar";
     private static final Pattern HADOOP_VERSION_PATTERN = Pattern.compile(HADOOP_VERSION_MATCH);
private static final String MAIN_CLASS_PROPERTY = "_mainClass";
- private static final String HOME_DIR_PROPERTY = "hadoop.home.dir";
+ public static final String HOME_DIR_PROPERTY = "hadoop.home.dir";
private static final String HOME_DIR_OPTION = "-Dhadoop.home.dir";
public Set<DiscoveredResourceDetails> discoverResources(
diff --git a/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml b/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml
index 334fcf5..cd22c2d 100644
--- a/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml
+++ b/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml
@@ -7,7 +7,7 @@
        <!-- NameNode (http://wiki.apache.org/hadoop/NameNode) -->
        <server name="NameNode" discovery="HadoopServiceDiscovery" class="HadoopServiceComponent">
<plugin-configuration>
-                <c:simple-property name="hadoop.home.dir" displayName="Home Directory" readOnly="true" />
+                <c:simple-property name="hadoop.home.dir" displayName="Home Directory" />
                <c:simple-property name="_mainClass" displayName="Main Class" readOnly="true" default="org.apache.hadoop.hdfs.server.namenode.NameNode" />
</plugin-configuration>
commit a0060b11bf90aa65423060daf1bdeb34098dc1ca
Author: Lukas Krejci <lkrejci(a)redhat.com>
Date: Tue Jul 31 16:20:43 2012 +0200
try to read the hadoop home dir from the commandline if it is there,
otherwise fall back to the CWD of its process.
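    For illustration (not part of the patch): the sketch below mirrors the option scan
    that getHadoopHomeDirIfAvailable() introduces, run against a made-up NameNode command
    line; when no -Dhadoop.home.dir=... argument is present it returns null and the
    discovery falls back to the process's current working directory. Class name, sample
    command line, and the stand-in cwd value are assumptions for the example only:

        public class HomeDirLookupExample {
            private static final String HOME_DIR_OPTION = "-Dhadoop.home.dir";

            // same scan as HadoopServiceDiscovery.getHadoopHomeDirIfAvailable()
            static String homeDirFromCommandLine(String[] cmdline) {
                for (String cmd : cmdline) {
                    if (cmd.startsWith(HOME_DIR_OPTION)) {
                        int eqPos = cmd.indexOf('=');
                        if (eqPos > 0) {
                            return cmd.substring(eqPos + 1);
                        }
                    }
                }
                return null;
            }

            public static void main(String[] args) {
                // hypothetical command line of a NameNode process
                String[] cmdline = {"java", "-Dhadoop.home.dir=/opt/hadoop",
                    "org.apache.hadoop.hdfs.server.namenode.NameNode"};
                String homeDir = homeDirFromCommandLine(cmdline);
                String cwd = "/opt/hadoop/bin"; // stand-in for the discovered process CWD
                System.out.println(homeDir != null ? homeDir : cwd); // prints /opt/hadoop
            }
        }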
diff --git a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceDiscovery.java b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceDiscovery.java
index f010976..7b75931 100644
--- a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceDiscovery.java
+++ b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceDiscovery.java
@@ -52,7 +52,9 @@ public class HadoopServiceDiscovery implements ResourceDiscoveryComponent<Resour
     private static final String HADOOP_VERSION_MATCH = "hadoop-core-([0-9\\.]+)\\.jar";
     private static final Pattern HADOOP_VERSION_PATTERN = Pattern.compile(HADOOP_VERSION_MATCH);
private static final String MAIN_CLASS_PROPERTY = "_mainClass";
-
+ private static final String HOME_DIR_PROPERTY = "hadoop.home.dir";
+ private static final String HOME_DIR_OPTION = "-Dhadoop.home.dir";
+
public Set<DiscoveredResourceDetails> discoverResources(
        ResourceDiscoveryContext<ResourceComponent<?>> resourceDiscoveryContext)
throws InvalidPluginConfigurationException, Exception {
@@ -72,6 +74,9 @@ public class HadoopServiceDiscovery implements ResourceDiscoveryComponent<Resour
String version = getVersion(cwd);
        Configuration pluginConfiguration = resourceDiscoveryContext.getDefaultPluginConfiguration();
+
+            //TODO is it ok to base the resource key on the current working directory as opposed to
+            //the configured hadoop.home.dir? How do they differ?
            DiscoveredResourceDetails detail = new DiscoveredResourceDetails(resourceType, // ResourceType
                rtName + ":" + cwd, // ResourceKey
                rtName, // resource name
@@ -89,7 +94,14 @@ public class HadoopServiceDiscovery implements ResourceDiscoveryComponent<Resour
pluginConfiguration.getSimpleValue(MAIN_CLASS_PROPERTY, null)));
        pluginConfiguration.put(new PropertySimple(JMXDiscoveryComponent.CONNECTION_TYPE,
            LocalVMTypeDescriptor.class.getName()));
-
+
+            String homeDir = getHadoopHomeDirIfAvailable(psr.getProcessInfo().getCommandLine());
+ if (homeDir == null) {
+ homeDir = cwd;
+ }
+
+ pluginConfiguration.put(new PropertySimple(HOME_DIR_PROPERTY, homeDir));
+
log.debug("Discovered " + detail);
details.add(detail);
@@ -130,4 +142,17 @@ public class HadoopServiceDiscovery implements ResourceDiscoveryComponent<Resour
}
}
+ private String getHadoopHomeDirIfAvailable(String[] cmdline) {
+ for(int i = 0; i < cmdline.length; ++i) {
+ String cmd = cmdline[i];
+ if (cmd.startsWith(HOME_DIR_OPTION)) {
+ int eqPos = cmd.indexOf('=');
+ if (eqPos > 0) {
+ return cmd.substring(eqPos + 1);
+ }
+ }
+ }
+
+ return null;
+ }
}