modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/components/configuration/ConfigurationEditor.java | 12 + modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/inventory/resource/detail/operation/history/ResourceOperationHistoryDetailsView.java | 1 modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopOperationsDelegate.java | 61 ++++++- modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerComponent.java | 7 modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopSupportedOperations.java | 5 modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml | 79 +++++++++- 6 files changed, 147 insertions(+), 18 deletions(-)
New commits: commit ccfe434abcf13690aab214d0aeb4472ce79658cc Author: Jirka Kremser jkremser@redhat.com Date: Thu Aug 2 12:51:40 2012 +0200
New set of operations: start, stop for all node types; list job queue for job tracker
diff --git a/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/components/configuration/ConfigurationEditor.java b/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/components/configuration/ConfigurationEditor.java index aec7274..e5a321c 100644 --- a/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/components/configuration/ConfigurationEditor.java +++ b/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/components/configuration/ConfigurationEditor.java @@ -175,6 +175,7 @@ public class ConfigurationEditor extends LocatableVLayout { private String editorTitle = null; private boolean readOnly = false; private boolean allPropertiesWritable = false; + private boolean preserveTextFormatting = false; private Map<String, String> invalidPropertyNameToDisplayNameMap = new HashMap<String, String>(); private Set<PropertyValueChangeListener> propertyValueChangeListeners = new HashSet<PropertyValueChangeListener>();
@@ -252,6 +253,14 @@ public class ConfigurationEditor extends LocatableVLayout { public void setAllPropertiesWritable(boolean allPropertiesWritable) { this.allPropertiesWritable = allPropertiesWritable; } + + public boolean isPreserveTextFormatting() { + return preserveTextFormatting; + } + + public void setPreserveTextFormatting(boolean preserveFormatting) { + this.preserveTextFormatting = preserveFormatting; + }
public String getEditorTitle() { return editorTitle; @@ -1411,6 +1420,9 @@ public class ConfigurationEditor extends LocatableVLayout { // Property values are user-editable, so escape HTML when displayed as static text, to prevent XSS attacks. value = StringUtility.escapeHtml(value); } + if (preserveTextFormatting) { + value = "<pre>" + value + "</pre>"; + } valueItem.setValue(value);
setValueAsTooltipIfAppropriate(valueItem, value); diff --git a/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/inventory/resource/detail/operation/history/ResourceOperationHistoryDetailsView.java b/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/inventory/resource/detail/operation/history/ResourceOperationHistoryDetailsView.java index d7c118a..299b0eb 100644 --- a/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/inventory/resource/detail/operation/history/ResourceOperationHistoryDetailsView.java +++ b/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/inventory/resource/detail/operation/history/ResourceOperationHistoryDetailsView.java @@ -143,6 +143,7 @@ public class ResourceOperationHistoryDetailsView extends AbstractOperationHistor && !resultsConfigurationDefinition.getPropertyDefinitions().isEmpty()) { ConfigurationEditor editor = new ConfigurationEditor(extendLocatorId("results"), operationDefinition.getResultsConfigurationDefinition(), operationHistory.getResults()); + editor.setPreserveTextFormatting(true); editor.setReadOnly(true); resultsSection.addMember(editor); } else { diff --git a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopOperationsDelegate.java b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopOperationsDelegate.java index 933eabd..6ebead7 100644 --- a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopOperationsDelegate.java +++ b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopOperationsDelegate.java @@ -24,6 +24,7 @@ import org.jetbrains.annotations.NotNull;
import org.rhq.core.domain.configuration.Configuration; import org.rhq.core.pluginapi.inventory.InvalidPluginConfigurationException; +import org.rhq.core.pluginapi.inventory.ResourceComponent; import org.rhq.core.pluginapi.inventory.ResourceContext; import org.rhq.core.pluginapi.operation.OperationResult; import org.rhq.core.system.ProcessExecution; @@ -41,14 +42,14 @@ public class HadoopOperationsDelegate { private static final long MAX_WAIT = 1000 * 60 * 5; private static final int MAX_OUTPUT = 2048;
- private ResourceContext<HadoopServerComponent> resourceContext; + private ResourceContext<? extends ResourceComponent<?>> resourceContext;
- public HadoopOperationsDelegate(ResourceContext<HadoopServerComponent> resourceContext) { + public HadoopOperationsDelegate(ResourceContext<? extends ResourceComponent<?>> resourceContext) { this.resourceContext = resourceContext; }
public OperationResult invoke(@NotNull - HadoopSupportedOperations operation, Configuration parameters) throws InterruptedException { + HadoopSupportedOperations operation, Configuration parameters, String serverType) throws InterruptedException {
ProcessExecutionResults results = null; switch (operation) { @@ -62,6 +63,15 @@ public class HadoopOperationsDelegate { case LS: results = ls(operation); break; + case START: + results = start(operation, serverType); + break; + case STOP: + results = stop(operation, serverType); + break; + case QUEUE_LIST: + results = queueList(operation); + break; default: throw new UnsupportedOperationException(operation.toString()); } @@ -77,26 +87,52 @@ public class HadoopOperationsDelegate { }
/** + * @param operation supported Hadoop operation @see HadoopSupportedOperations + * @return the object encapsulating the exit code, err output and std output + */ + private ProcessExecutionResults queueList(HadoopSupportedOperations operation) { + return invokeGeneralOperation(operation); + } + + /** + * @param operation supported Hadoop operation @see HadoopSupportedOperations + * @param serverType {Name|Data|SecondaryName}Node / {Job|Task}Tracker + * @return the object encapsulating the exit code, err output and std output + */ + private ProcessExecutionResults stop(HadoopSupportedOperations operation, String serverType) { + return invokeGeneralOperation(operation, serverType); + } + + /** + * @param operation supported Hadoop operation @see HadoopSupportedOperations + * @param serverType {Name|Data|SecondaryName}Node / {Job|Task}Tracker + * @return the object encapsulating the exit code, err output and std output + */ + private ProcessExecutionResults start(HadoopSupportedOperations operation, String serverType) { + return invokeGeneralOperation(operation, serverType); + } + + /** * Format a new distributed filesystem * by running $bin/hadoop namenode -format * - * @return message + * @return the object encapsulating the exit code, err output and std output */ private ProcessExecutionResults format(HadoopSupportedOperations operation) { return invokeGeneralOperation(operation); }
/** - * @param operation - * @return + * @param operation supported Hadoop operation @see HadoopSupportedOperations + * @return the object encapsulating the exit code, err output and std output */ private ProcessExecutionResults ls(HadoopSupportedOperations operation) { return invokeGeneralOperation(operation); }
/** - * @param operation - * @return + * @param operation supported Hadoop operation @see HadoopSupportedOperations + * @return the object encapsulating the exit code, err output and std output */ private ProcessExecutionResults fsck(HadoopSupportedOperations operation) { return invokeGeneralOperation(operation); @@ -146,12 +182,17 @@ public class HadoopOperationsDelegate { }
private ProcessExecutionResults invokeGeneralOperation(HadoopSupportedOperations operation) { + return invokeGeneralOperation(operation, null); + } + + private ProcessExecutionResults invokeGeneralOperation(HadoopSupportedOperations operation, String serverType) { String hadoopHome = resourceContext.getPluginConfiguration() .getSimple(HadoopServerDiscovery.HOME_DIR_PROPERTY).getStringValue(); String executable = hadoopHome + operation.getRelativePathToExecutable(); + String args = operation.getArgs() + (serverType == null ? "" : serverType.toLowerCase());
- ProcessExecutionResults results = executeExecutable(resourceContext.getSystemInformation(), executable, - operation.getArgs(), MAX_WAIT, true, true); + ProcessExecutionResults results = executeExecutable(resourceContext.getSystemInformation(), executable, args, + MAX_WAIT, true, true); return results; } } diff --git a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerComponent.java b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerComponent.java index 9acc4ff..f6911b9 100644 --- a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerComponent.java +++ b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerComponent.java @@ -163,7 +163,12 @@ public class HadoopServerComponent extends JMXServerComponent<ResourceComponent< */ public OperationResult invokeOperation(String name, Configuration params) throws Exception { HadoopSupportedOperations operation = HadoopSupportedOperations.valueOf(name.toUpperCase()); - return operationsDelegate.invoke(operation, params); + String serverType = getServerType(); + OperationResult result = operationsDelegate.invoke(operation, params, serverType); + if (operation.equals(HadoopSupportedOperations.START) || operation.equals(HadoopSupportedOperations.STOP)) { + getResourceContext().getAvailabilityContext().requestAvailabilityCheck(); + } + return result; }
private File determineLogFile() { diff --git a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopSupportedOperations.java b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopSupportedOperations.java index faddc95..ec0fe0e 100644 --- a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopSupportedOperations.java +++ b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopSupportedOperations.java @@ -26,7 +26,10 @@ package org.rhq.plugins.hadoop; public enum HadoopSupportedOperations { FORMAT("/bin/hadoop", "namenode -format"), FSCK("/bin/hadoop", "fsck /"), - LS("/bin/hadoop", "fs -ls"); + LS("/bin/hadoop", "fs -ls"), + START("/bin/hadoop-daemon.sh", "start "), + STOP("/bin/hadoop-daemon.sh", "stop "), + QUEUE_LIST("/bin/hadoop", "queue -list");
private final String relativePathToExecutable;
diff --git a/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml b/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml index 1b3a372..48b647b 100644 --- a/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml +++ b/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml @@ -10,19 +10,36 @@ <c:simple-property name="hadoop.home.dir" displayName="Home Directory"/> <c:simple-property name="_mainClass" displayName="Main Class" readOnly="true" default="org.apache.hadoop.hdfs.server.namenode.NameNode"/> - <c:simple-property name="logPollingInterval" default="60" description="The interval for log file polling in seconds."/> + <c:simple-property name="logPollingInterval" default="60" + description="The interval for log file polling in seconds."/> </plugin-configuration>
<process-scan name="NameNode" query="process|basename|match=^java.*,arg|-Dproc_namenode|match=.*"/> - + <operation name="format" displayName="Format dfs" description="Format a new distributed-filesystem."> - <results><c:simple-property name="operationResult" description="Outcome of formatting the dfs."/></results> + <results> + <c:simple-property name="operationResult" description="Outcome of formatting the dfs."/> + </results> </operation> <operation name="fsck" displayName="Check dfs" description="Runs a HDFS filesystem checking utility."> - <results><c:simple-property name="operationResult" description="Outcome of checking the dfs."/></results> + <results> + <c:simple-property name="operationResult" description="Outcome of checking the dfs."/> + </results> </operation> <operation name="ls" displayName="Lists dfs" description="Lists the content of the distributed-filesystem."> - <results><c:simple-property name="operationResult" description="Outcome of listing the dfs."/></results> + <results> + <c:simple-property name="operationResult" description="Outcome of listing the dfs."/> + </results> + </operation> + <operation name="start" displayName="Start NameNode" description="Starts the NameNode instance."> + <results> + <c:simple-property name="operationResult" description="Outcome of starting the NameNode."/> + </results> + </operation> + <operation name="stop" displayName="Stop NameNode" description="Stops the NameNode instance."> + <results> + <c:simple-property name="operationResult" description="Outcome of stopping the NameNode."/> + </results> </operation>
<metric property="Hadoop:service=NameNode,name=NameNodeInfo:NameDirStatuses" displayName="NameNode Storage" @@ -69,7 +86,19 @@ <c:simple-property name="_mainClass" displayName="Main Class" readOnly="true" default="org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode"/> </plugin-configuration> + <process-scan name="SecondaryNameNode" query="process|basename|match=^java.*,arg|-Dproc_secondarynamenode|match=.*"/> + + <operation name="start" displayName="Start SecondaryNameNode" description="Starts the SecondaryNameNode instance."> + <results> + <c:simple-property name="operationResult" description="Outcome of starting the SecondaryNameNode"/> + </results> + </operation> + <operation name="stop" displayName="Stop SecondaryNameNode" description="Stops the SecondaryNameNode instance."> + <results> + <c:simple-property name="operationResult" description="Outcome of stopping the SecondaryNameNode"/> + </results> + </operation> </server>
<!-- DataNode (http://wiki.apache.org/hadoop/DataNode) --> @@ -81,6 +110,17 @@
<process-scan name="DataNode" query="process|basename|match=^java.*,arg|*|match=.*proc_datanode.*"/>
+ <operation name="start" displayName="Start DataNode" description="Starts the DataNode instance."> + <results> + <c:simple-property name="operationResult" description="Outcome of starting the DataNode"/> + </results> + </operation> + <operation name="stop" displayName="Stop DataNode" description="Stops the DataNode instance."> + <results> + <c:simple-property name="operationResult" description="Outcome of stopping the DataNode"/> + </results> + </operation> + + <metric property="Hadoop:service=DataNode,name=DataNode*:bytes_written" displayName="Bytes Written" measurementType="trendsup"/> + <metric property="Hadoop:service=DataNode,name=FSDatasetState*:Remaining" displayName="Remaining" units="bytes"/> @@ -104,6 +144,22 @@
<process-scan name="JobTracker" query="process|basename|match=^java.*,arg|-Dproc_jobtracker|match=.*"/>
+ <operation name="start" displayName="Start JobTracker" description="Starts the JobTracker instance."> + <results> + <c:simple-property name="operationResult" description="Outcome of starting the JobTracker"/> + </results> + </operation> + <operation name="stop" displayName="Stop JobTracker" description="Stops the JobTracker instance."> + <results> + <c:simple-property name="operationResult" description="Outcome of stopping the JobTracker"/> + </results> + </operation> + <operation name="queue_list" displayName="Lists job queue" description="Lists the content of the job queue."> + <results> + <c:simple-property name="operationResult" description="Outcome of listing the job queue"/> + </results> + </operation> + <metric property="Hadoop:service=JobTracker,name=JobTrackerMetrics:jobs_completed" displayName="Jobs Completed" displayType="summary"/> <metric property="Hadoop:service=JobTracker,name=JobTrackerMetrics:jobs_running" displayName="Jobs Running" @@ -154,6 +210,17 @@
<process-scan name="TaskTracker" query="process|basename|match=^java.*,arg|-Dproc_tasktracker|match=.*"/>
+ <operation name="start" displayName="Start TaskTracker" description="Starts the TaskTracker instance."> + <results> + <c:simple-property name="operationResult" description="Outcome of starting the TaskTracker"/> + </results> + </operation> + <operation name="stop" displayName="Stop TaskTracker" description="Stops the TaskTracker instance."> + <results> + <c:simple-property name="operationResult" description="Outcome of stopping the TaskTracker"/> + </results> + </operation> + <metric property="Hadoop:service=TaskTracker,name=TaskTrackerMetrics:mapTaskSlots" displayName="Map Task Slots"/> <metric property="Hadoop:service=TaskTracker,name=TaskTrackerMetrics:reduceTaskSlots" displayName="Reduce Task Slots"/> <metric property="Hadoop:service=TaskTracker,name=TaskTrackerMetrics:maps_running" displayName="Running Map Tasks"/> @@ -167,4 +234,4 @@ dataType="trait"/>
</server> -</plugin> \ No newline at end of file +</plugin>