Browse Source

Merge -r 704260:704261 from trunk to branch-0.19 to fix HADOOP-4393.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/branches/branch-0.19@704262 13f79535-47bb-0310-9956-ffa450edef68
Arun Murthy 17 years ago
parent
commit
65d22abd45

+ 4 - 0
CHANGES.txt

@@ -837,6 +837,10 @@ Release 0.19.0 - Unreleased
 
     HADOOP-4014. Create hard links with 'fsutil hardlink' on Windows. (shv)
 
+    HADOOP-4393. Merged org.apache.hadoop.fs.permission.AccessControlException
+    and org.apache.hadoop.security.AccessControlIOException into a single
+    class org.apache.hadoop.security.AccessControlException. (omalley via acmurthy)
+
 Release 0.18.2 - Unreleased
 
   BUG FIXES

+ 7 - 5
src/core/org/apache/hadoop/fs/permission/AccessControlException.java

@@ -17,10 +17,10 @@
  */
 package org.apache.hadoop.fs.permission;
 
-/**
- * An exception class for access control related issues.
- */
-public class AccessControlException extends java.io.IOException {
+import java.io.IOException;
+
+@Deprecated
+public class AccessControlException extends IOException {
   //Required by {@link java.io.Serializable}.
   private static final long serialVersionUID = 1L;
 
@@ -37,5 +37,7 @@ public class AccessControlException extends java.io.IOException {
    * with the specified detail message.
    * @param s the detail message.
    */
-  public AccessControlException(String s) {super(s);}
+  public AccessControlException(String s) {
+    super(s);
+  }
 }

+ 9 - 11
src/core/org/apache/hadoop/security/AccessControlIOException.java → src/core/org/apache/hadoop/security/AccessControlException.java

@@ -17,29 +17,27 @@
  */
 package org.apache.hadoop.security;
 
-import java.io.IOException;
-
 /**
- * An exception indicating access control violations.  
+ * An exception class for access control related issues.
  */
-public class AccessControlIOException extends IOException {
+public class AccessControlException 
+    extends org.apache.hadoop.fs.permission.AccessControlException {
+
+  //Required by {@link java.io.Serializable}.
+  private static final long serialVersionUID = 1L;
 
-  private static final long serialVersionUID = -1874018786480045420L;
-  
   /**
    * Default constructor is needed for unwrapping from 
    * {@link org.apache.hadoop.ipc.RemoteException}.
    */
-  public AccessControlIOException() {
+  public AccessControlException() {
     super("Permission denied.");
   }
 
   /**
-   * Constructs an {@link AccessControlIOException}
+   * Constructs an {@link AccessControlException}
    * with the specified detail message.
    * @param s the detail message.
    */
-  public AccessControlIOException(String s) {
-    super(s);
-  }
+  public AccessControlException(String s) {super(s);}
 }

+ 1 - 1
src/hdfs/org/apache/hadoop/hdfs/DFSClient.java

@@ -22,7 +22,6 @@ import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryProxy;
 import org.apache.hadoop.fs.*;
-import org.apache.hadoop.fs.permission.AccessControlException;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.ipc.*;
 import org.apache.hadoop.net.NetUtils;
@@ -34,6 +33,7 @@ import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.NotReplicatedYetException;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UnixUserGroupInformation;
 import org.apache.hadoop.util.*;
 

+ 1 - 0
src/hdfs/org/apache/hadoop/hdfs/protocol/ClientProtocol.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.protocol;
 import java.io.*;
 
 import org.apache.hadoop.ipc.VersionedProtocol;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.hdfs.protocol.FSConstants.UpgradeAction;
 import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
 import org.apache.hadoop.fs.permission.*;

+ 1 - 0
src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java

@@ -29,6 +29,7 @@ import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
 import org.apache.hadoop.hdfs.server.namenode.BlocksMap.BlockInfo;
 import org.apache.hadoop.hdfs.server.namenode.metrics.FSNamesystemMBean;
 import org.apache.hadoop.hdfs.server.namenode.metrics.FSNamesystemMetrics;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UnixUserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.*;

+ 1 - 0
src/hdfs/org/apache/hadoop/hdfs/server/namenode/PermissionChecker.java

@@ -23,6 +23,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.permission.*;
 import org.apache.hadoop.ipc.Server;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 
 /** Perform permission checking in {@link FSNamesystem}. */

+ 2 - 3
src/mapred/org/apache/hadoop/mapred/JobTracker.java

@@ -49,7 +49,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.AccessControlException;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
@@ -64,7 +63,7 @@ import org.apache.hadoop.net.NetworkTopology;
 import org.apache.hadoop.net.Node;
 import org.apache.hadoop.net.NodeBase;
 import org.apache.hadoop.net.ScriptBasedMapping;
-import org.apache.hadoop.security.AccessControlIOException;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.HostsFileReader;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -2243,7 +2242,7 @@ public class JobTracker implements MRConstants, InterTrackerProtocol,
     // get the queue
     String queue = job.getProfile().getQueueName();
     if (!queueManager.hasAccess(queue, job, oper, ugi)) {
-      throw new AccessControlIOException("User " 
+      throw new AccessControlException("User " 
                             + ugi.getUserName() 
                             + " cannot perform "
                             + "operation " + oper + " on queue " + queue);

+ 1 - 0
src/test/org/apache/hadoop/hdfs/TestDFSPermission.java

@@ -28,6 +28,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.common.Util;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.fs.permission.*;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UnixUserGroupInformation;
 
 import junit.framework.AssertionFailedError;

+ 1 - 1
src/test/org/apache/hadoop/hdfs/TestFileAppend2.java

@@ -28,7 +28,6 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.AccessControlException;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
@@ -36,6 +35,7 @@ import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UnixUserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation;
 

+ 1 - 1
src/tools/org/apache/hadoop/tools/DistCp.java

@@ -42,7 +42,6 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.AccessControlException;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
 import org.apache.hadoop.io.LongWritable;
@@ -63,6 +62,7 @@ import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.SequenceFileRecordReader;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;