Browse Source

HADOOP-2583. Fixes a bug in the Eclipse plug-in UI to edit locations. Plug-in version is now synchronized with Hadoop version.


git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@611553 13f79535-47bb-0310-9956-ffa450edef68
Christophe Taton 17 years ago
parent
commit
0aab9495ef

+ 3 - 0
CHANGES.txt

@@ -240,6 +240,9 @@ Trunk (unreleased changes)
 
   BUG FIXES
 
+    HADOOP-2583.  Fixes a bug in the Eclipse plug-in UI to edit locations.
+    Plug-in version is now synchronized with Hadoop version.
+
     HADOOP-2100.  Remove faulty check for existence of $HADOOP_PID_DIR and let
     'mkdir -p' check & create it. (Michael Bieniosek via acmurthy)
 

+ 1 - 1
src/contrib/eclipse-plugin/META-INF/MANIFEST.MF

@@ -2,7 +2,7 @@ Manifest-Version: 1.0
 Bundle-ManifestVersion: 2
 Bundle-Name: MapReduce Tools for Eclipse
 Bundle-SymbolicName: org.apache.hadoop.eclipse;singleton:=true
-Bundle-Version: 1.1
+Bundle-Version: 0.16
 Bundle-Activator: org.apache.hadoop.eclipse.Activator
 Bundle-Localization: plugin
 Require-Bundle: org.eclipse.ui,

BIN
src/contrib/eclipse-plugin/resources/Old/location-edit-16x16.png


+ 12 - 8
src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/HadoopLocationWizard.java

@@ -242,9 +242,11 @@ public class HadoopLocationWizard extends WizardPage {
     }
     this.setControl(panel /* mediator.folder */);
     {
-      final Button validate = new Button(panel, SWT.NONE);
-      validate.setText("&Load from file");
-      validate.addListener(SWT.Selection, new Listener() {
+      final Button btn = new Button(panel, SWT.NONE);
+      btn.setText("&Load from file");
+      btn.setEnabled(false);
+      btn.setToolTipText("Not yet implemented");
+      btn.addListener(SWT.Selection, new Listener() {
         public void handleEvent(Event e) {
           // TODO
         }
@@ -253,6 +255,8 @@ public class HadoopLocationWizard extends WizardPage {
     {
       final Button validate = new Button(panel, SWT.NONE);
       validate.setText("&Validate location");
+      validate.setEnabled(false);
+      validate.setToolTipText("Not yet implemented");
       validate.addListener(SWT.Selection, new Listener() {
         public void handleEvent(Event e) {
           testLocation();
@@ -311,7 +315,7 @@ public class HadoopLocationWizard extends WizardPage {
         return;
 
       location.setConfProp(prop, propValue);
-      Display.getDefault().asyncExec(new Runnable() {
+      Display.getDefault().syncExec(new Runnable() {
         public void run() {
           getContainer().updateButtons();
         }
@@ -347,7 +351,7 @@ public class HadoopLocationWizard extends WizardPage {
       final String socksProxyPort =
           location.getConfProp(ConfProp.PI_SOCKS_PROXY_PORT);
 
-      Display.getDefault().asyncExec(new Runnable() {
+      Display.getDefault().syncExec(new Runnable() {
         public void run() {
           switch (prop) {
             case PI_JOB_TRACKER_HOST: {
@@ -856,7 +860,7 @@ public class HadoopLocationWizard extends WizardPage {
     public void modifyText(ModifyEvent e) {
       final Text text = (Text) e.widget;
       final ConfProp prop = (ConfProp) text.getData("hProp");
-      Display.getDefault().asyncExec(new Runnable() {
+      Display.getDefault().syncExec(new Runnable() {
         public void run() {
           mediator.notifyChange(TabMain.this, prop, text.getText());
         }
@@ -873,7 +877,7 @@ public class HadoopLocationWizard extends WizardPage {
       final Button button = (Button) e.widget;
       final ConfProp prop = (ConfProp) button.getData("hProp");
 
-      Display.getDefault().asyncExec(new Runnable() {
+      Display.getDefault().syncExec(new Runnable() {
         public void run() {
           // We want to receive the update also!
           mediator.notifyChange(null, prop, button.getSelection() ? "yes"
@@ -953,7 +957,7 @@ public class HadoopLocationWizard extends WizardPage {
       final String propName =
           (hPropName != null) ? (String) hPropName : null;
 
-      Display.getDefault().asyncExec(new Runnable() {
+      Display.getDefault().syncExec(new Runnable() {
         public void run() {
           if (prop != null)
             mediator.notifyChange(TabAdvanced.this, prop, text.getText());

+ 0 - 149
src/contrib/eclipse-plugin/todo.txt

@@ -1,149 +0,0 @@
--- DONE --------------------------
-	* Pref wizard page for hadoop libraries (eugene) -- DONE
-	* running wrong jar bug (julz) -- not using WTP any more DONE
-	* DFS only for hadoop servers (julz) -- DONE
-	* allow per-project hadoop dir, moved selection of hadoop path to first page of wizard (julz) -- DONE
-	* allow creation of new driver as part of new project wizard (julz) -- DONE
-	* BUG: ssh console sometimes drops (eugene) -- DONE
-	* Server Selection wizard - finish button should not be clickable if radio is on create server (eugene) -- DONE
-	* module icons for jar and job (dennis) -- DONE (sort of)
-	
-			 
---- Bugs ---
-
-	* Server Selection wizard has identical name and location -- 
-	
---- Features ----
-
-	* Limit type searches in driver wizard to current project (eugene) 
-	
-	* new.. dialogs on mapred perspective (julz) 
-	
-	* show cheat sheet, more wizardy goodness (julz) 
-
-
---- Documentation ---
-
-	* cheat sheets (dennis)
-
-
---- Testing ---
-
-	* test on mac osx (julz)
-
-
---- Everything ------------------
-
-* Run/Debug.. on Hadoop runs the project on a local hadoop cloud, this will involve finding
-	the appropriate Map/Reduce classes, as a first pass I suggest we have the user specify these in the Run.. dialog
-	therefore this task breaks down to at least:
-	
-	* hadoop new proj. size
-	
-	* generate mapper/reducer screen on new project wizard
-	* title bar, titles on new X wizards
-	* hadoop perspective show cheat sheet
-	* status on server view
-	* double click on jobs, go to associated console
-	* icons for jobs
-	
-	* copy resources directory or similar to dfs, allow configurable resources directory
-	
-	* test installation directory on new server screen (i.e. ssh in and check files are present)
-	
-	* (Eugene) if server has user:pass@hostname in location, ssh file and run it on remote hadoop client
-	
-	* (Daniel) make launch on local hadoop scenario properly work, deploy jar to the server when run
-	
-	* (Julz) read info from 50030 to show jobs running on server
-
-	* contribute Format action for fs, suggest this when a server is first created
-	
-	* Possibly DFS navigator view?
-
-	*	(and to specify input and output? - how should we handle this?)
-	
-	* Restrict browse classes dialog above to subclass of Mapper, Reducer etc., add proposals to text fields
-	
-	* Make launch dialog look pretty
-	
-	* Run the specified Mapper and Reducer on a local server
-	
-	* Allow the user to Run on a server defined in a servers view (i.e. so you can run locally, and on cloud A or B with the same settings)
-	
-	* Allow the user to configure the hadoop server from this view as appropriate
-	
-	* When the job runs, keep the tracker interface and put it into a view in the perspective (see next task!) so the user
-	can track the state
-
-* Add a Hadouken perspective with
-	* the Hadoop targets view (analogous to servers view in WTP project)
-	
-	* the running jobs view which shows the status of running jobs
-	
-	* a Current Lesson/API panel showing html text from the lecturer?
-	
-	* any jazz stuff?
-	
-* JUnit support, specify expected inputs and outputs and run on server, collecting results and presenting a unified view
- similar to the junit component.
--- DONE --------------------------
-
--- Current priorities ------------
-
- ... Dennis, maybe you could move stuff from below up here?
-
---- Everything ------------------
-
-* Run/Debug.. on Hadoop runs the project on a local hadoop cloud, this will involve finding
-	the appropriate Map/Reduce classes, as a first pass I suggest we have the user specify these in the Run.. dialog
-	therefore this task breaks down to at least:
-	
-	* hadoop new proj. size
-	
-	* generate mapper/reducer screen on new project wizard
-	* title bar, titles on new X wizards
-	* auto-focus on main on X wizards, auto show newly created stuff
-	* on new driver screen, specify mapper (allow creation for bonus points)
-	* hadoop perspective show cheat sheet
-	* remove browse button
-	* status on server view
-	* double click on jobs, go to associated console
-	* icons for jobs
-	
-	* (Eugene) if server has user:pass@hostname in location, ssh file and run it on remote hadoop client
-	
-	* (Daniel) make launch on local hadoop scenario properly work, deploy jar to the server when run
-	
-	* (Julz) read info from 50030 to show jobs running on server
-
-	* contribute Format action for fs, suggest this when a server is first created
-	
-	* Possibly DFS navigator view?
-
-	*	(and to specify input and output? - how should we handle this?)
-	
-	* Restrict browse classes dialog above to subclass of Mapper, Reducer etc., add proposals to text fields
-	
-	* Make launch dialog look pretty
-	
-	* Run the specified Mapper and Reducer on a local server
-	
-	* Allow the user to Run on a server defined in a servers view (i.e. so you can run locally, and on cloud A or B with the same settings)
-	
-	* Allow the user to configure the hadoop server from this view as appropriate
-	
-	* When the job runs, keep the tracker interface and put it into a view in the perspective (see next task!) so the user
-	can track the state
-
-* Add a Hadouken perspective with
-	* the Hadoop targets view (analogous to servers view in WTP project)
-	
-	* the running jobs view which shows the status of running jobs
-	
-	* a Current Lesson/API panel showing html text from the lecturer?
-	
-	* any jazz stuff?
-	
-* JUnit support, specify expected inputs and outputs and run on server, collecting results and presenting a unified view
- similar to the junit component.