
HDFS-3291. add test that covers HttpFS working w/ a non-HDFS Hadoop filesystem (tucu)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1370387 13f79535-47bb-0310-9956-ffa450edef68
Alejandro Abdelnur 13 years ago
parent
commit
754fd7b2fb

+ 162 - 135
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java → hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java

@@ -18,19 +18,6 @@
 
 package org.apache.hadoop.fs.http.client;
 
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.Writer;
-import java.net.URI;
-import java.net.URL;
-import java.security.PrivilegedExceptionAction;
-import java.util.Arrays;
-import java.util.Collection;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.ContentSummary;
@@ -47,7 +34,6 @@ import org.apache.hadoop.test.HadoopUsersConfTestHelper;
 import org.apache.hadoop.test.TestDir;
 import org.apache.hadoop.test.TestDirHelper;
 import org.apache.hadoop.test.TestHdfs;
-import org.apache.hadoop.test.TestHdfsHelper;
 import org.apache.hadoop.test.TestJetty;
 import org.apache.hadoop.test.TestJettyHelper;
 import org.junit.Assert;
@@ -57,8 +43,31 @@ import org.junit.runners.Parameterized;
 import org.mortbay.jetty.Server;
 import org.mortbay.jetty.webapp.WebAppContext;
 
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Writer;
+import java.net.URI;
+import java.net.URL;
+import java.security.PrivilegedExceptionAction;
+import java.util.Arrays;
+import java.util.Collection;
+
 @RunWith(value = Parameterized.class)
-public class TestHttpFSFileSystem extends HFSTestCase {
+public abstract class BaseTestHttpFSWith extends HFSTestCase {
+
+  protected abstract Path getProxiedFSTestDir();
+
+  protected abstract String getProxiedFSURI();
+
+  protected abstract Configuration getProxiedFSConf();
+
+  protected boolean isLocalFS() {
+    return getProxiedFSURI().startsWith("file://");
+  }
 
   private void createHttpFSServer() throws Exception {
     File homeDir = TestDirHelper.getTestDir();
@@ -72,8 +81,8 @@ public class TestHttpFSFileSystem extends HFSTestCase {
     w.write("secret");
     w.close();
 
-    //HDFS configuration
-    String fsDefaultName = TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
+    //FileSystem being served by HttpFS
+    String fsDefaultName = getProxiedFSURI();
     Configuration conf = new Configuration(false);
     conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsDefaultName);
     File hdfsSite = new File(new File(homeDir, "conf"), "hdfs-site.xml");
@@ -105,7 +114,7 @@ public class TestHttpFSFileSystem extends HFSTestCase {
     return HttpFSFileSystem.class;
   }
 
-  protected FileSystem getHttpFileSystem() throws Exception {
+  protected FileSystem getHttpFSFileSystem() throws Exception {
     Configuration conf = new Configuration();
     conf.set("fs.webhdfs.impl", getFileSystemClass().getName());
     URI uri = new URI("webhdfs://" +
@@ -114,7 +123,7 @@ public class TestHttpFSFileSystem extends HFSTestCase {
   }
 
   protected void testGet() throws Exception {
-    FileSystem fs = getHttpFileSystem();
+    FileSystem fs = getHttpFSFileSystem();
     Assert.assertNotNull(fs);
     URI uri = new URI("webhdfs://" +
                       TestJettyHelper.getJettyURL().toURI().getAuthority());
@@ -123,13 +132,13 @@ public class TestHttpFSFileSystem extends HFSTestCase {
   }
 
   private void testOpen() throws Exception {
-    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
-    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
+    FileSystem fs = FileSystem.get(getProxiedFSConf());
+    Path path = new Path(getProxiedFSTestDir(), "foo.txt");
     OutputStream os = fs.create(path);
     os.write(1);
     os.close();
     fs.close();
-    fs = getHttpFileSystem();
+    fs = getHttpFSFileSystem();
     InputStream is = fs.open(new Path(path.toUri().getPath()));
     Assert.assertEquals(is.read(), 1);
     is.close();
@@ -137,7 +146,7 @@ public class TestHttpFSFileSystem extends HFSTestCase {
   }
 
   private void testCreate(Path path, boolean override) throws Exception {
-    FileSystem fs = getHttpFileSystem();
+    FileSystem fs = getHttpFSFileSystem();
     FsPermission permission = new FsPermission(FsAction.READ_WRITE, FsAction.NONE, FsAction.NONE);
     OutputStream os = fs.create(new Path(path.toUri().getPath()), permission, override, 1024,
                                 (short) 2, 100 * 1024 * 1024, null);
@@ -145,10 +154,12 @@ public class TestHttpFSFileSystem extends HFSTestCase {
     os.close();
     fs.close();
 
-    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    fs = FileSystem.get(getProxiedFSConf());
     FileStatus status = fs.getFileStatus(path);
-    Assert.assertEquals(status.getReplication(), 2);
-    Assert.assertEquals(status.getBlockSize(), 100 * 1024 * 1024);
+    if (!isLocalFS()) {
+      Assert.assertEquals(status.getReplication(), 2);
+      Assert.assertEquals(status.getBlockSize(), 100 * 1024 * 1024);
+    }
     Assert.assertEquals(status.getPermission(), permission);
     InputStream is = fs.open(path);
     Assert.assertEquals(is.read(), 1);
@@ -157,66 +168,70 @@ public class TestHttpFSFileSystem extends HFSTestCase {
   }
 
   private void testCreate() throws Exception {
-    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
+    Path path = new Path(getProxiedFSTestDir(), "foo.txt");
     testCreate(path, false);
     testCreate(path, true);
     try {
       testCreate(path, false);
-      Assert.fail();
+      Assert.fail("the create should have failed because the file exists " +
+                  "and override is FALSE");
     } catch (IOException ex) {
 
     } catch (Exception ex) {
-      Assert.fail();
+      Assert.fail(ex.toString());
     }
   }
 
   private void testAppend() throws Exception {
-    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
-    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
-    OutputStream os = fs.create(path);
-    os.write(1);
-    os.close();
-    fs.close();
-    fs = getHttpFileSystem();
-    os = fs.append(new Path(path.toUri().getPath()));
-    os.write(2);
-    os.close();
-    fs.close();
-    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
-    InputStream is = fs.open(path);
-    Assert.assertEquals(is.read(), 1);
-    Assert.assertEquals(is.read(), 2);
-    Assert.assertEquals(is.read(), -1);
-    is.close();
-    fs.close();
+    if (!isLocalFS()) {
+      FileSystem fs = FileSystem.get(getProxiedFSConf());
+      fs.mkdirs(getProxiedFSTestDir());
+      Path path = new Path(getProxiedFSTestDir(), "foo.txt");
+      OutputStream os = fs.create(path);
+      os.write(1);
+      os.close();
+      fs.close();
+      fs = getHttpFSFileSystem();
+      os = fs.append(new Path(path.toUri().getPath()));
+      os.write(2);
+      os.close();
+      fs.close();
+      fs = FileSystem.get(getProxiedFSConf());
+      InputStream is = fs.open(path);
+      Assert.assertEquals(is.read(), 1);
+      Assert.assertEquals(is.read(), 2);
+      Assert.assertEquals(is.read(), -1);
+      is.close();
+      fs.close();
+    }
   }
 
   private void testRename() throws Exception {
-    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
-    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo");
+    FileSystem fs = FileSystem.get(getProxiedFSConf());
+    Path path = new Path(getProxiedFSTestDir(), "foo");
     fs.mkdirs(path);
     fs.close();
-    fs = getHttpFileSystem();
+    fs = getHttpFSFileSystem();
     Path oldPath = new Path(path.toUri().getPath());
     Path newPath = new Path(path.getParent(), "bar");
     fs.rename(oldPath, newPath);
     fs.close();
-    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    fs = FileSystem.get(getProxiedFSConf());
     Assert.assertFalse(fs.exists(oldPath));
     Assert.assertTrue(fs.exists(newPath));
     fs.close();
   }
 
   private void testDelete() throws Exception {
-    Path foo = new Path(TestHdfsHelper.getHdfsTestDir(), "foo");
-    Path bar = new Path(TestHdfsHelper.getHdfsTestDir(), "bar");
-    Path foe = new Path(TestHdfsHelper.getHdfsTestDir(), "foe");
-    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    Path foo = new Path(getProxiedFSTestDir(), "foo");
+    Path bar = new Path(getProxiedFSTestDir(), "bar");
+    Path foe = new Path(getProxiedFSTestDir(), "foe");
+    FileSystem fs = FileSystem.get(getProxiedFSConf());
     fs.mkdirs(foo);
     fs.mkdirs(new Path(bar, "a"));
     fs.mkdirs(foe);
 
-    FileSystem hoopFs = getHttpFileSystem();
+    FileSystem hoopFs = getHttpFSFileSystem();
     Assert.assertTrue(hoopFs.delete(new Path(foo.toUri().getPath()), false));
     Assert.assertFalse(fs.exists(foo));
     try {
@@ -239,15 +254,15 @@ public class TestHttpFSFileSystem extends HFSTestCase {
   }
 
   private void testListStatus() throws Exception {
-    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
-    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
+    FileSystem fs = FileSystem.get(getProxiedFSConf());
+    Path path = new Path(getProxiedFSTestDir(), "foo.txt");
     OutputStream os = fs.create(path);
     os.write(1);
     os.close();
     FileStatus status1 = fs.getFileStatus(path);
     fs.close();
 
-    fs = getHttpFileSystem();
+    fs = getHttpFSFileSystem();
     FileStatus status2 = fs.getFileStatus(new Path(path.toUri().getPath()));
     fs.close();
 
@@ -267,16 +282,20 @@ public class TestHttpFSFileSystem extends HFSTestCase {
   }
 
   private void testWorkingdirectory() throws Exception {
-    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    FileSystem fs = FileSystem.get(getProxiedFSConf());
     Path workingDir = fs.getWorkingDirectory();
     fs.close();
 
-    fs = getHttpFileSystem();
-    Path hoopWorkingDir = fs.getWorkingDirectory();
+    fs = getHttpFSFileSystem();
+    if (isLocalFS()) {
+      fs.setWorkingDirectory(workingDir);
+    }
+    Path httpFSWorkingDir = fs.getWorkingDirectory();
     fs.close();
-    Assert.assertEquals(hoopWorkingDir.toUri().getPath(), workingDir.toUri().getPath());
+    Assert.assertEquals(httpFSWorkingDir.toUri().getPath(),
+                        workingDir.toUri().getPath());
 
-    fs = getHttpFileSystem();
+    fs = getHttpFSFileSystem();
     fs.setWorkingDirectory(new Path("/tmp"));
     workingDir = fs.getWorkingDirectory();
     fs.close();
@@ -284,62 +303,64 @@ public class TestHttpFSFileSystem extends HFSTestCase {
   }
 
   private void testMkdirs() throws Exception {
-    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo");
-    FileSystem fs = getHttpFileSystem();
+    Path path = new Path(getProxiedFSTestDir(), "foo");
+    FileSystem fs = getHttpFSFileSystem();
     fs.mkdirs(path);
     fs.close();
-    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    fs = FileSystem.get(getProxiedFSConf());
     Assert.assertTrue(fs.exists(path));
     fs.close();
   }
 
   private void testSetTimes() throws Exception {
-    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
-    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
-    OutputStream os = fs.create(path);
-    os.write(1);
-    os.close();
-    FileStatus status1 = fs.getFileStatus(path);
-    fs.close();
-    long at = status1.getAccessTime();
-    long mt = status1.getModificationTime();
-
-    fs = getHttpFileSystem();
-    fs.setTimes(path, mt + 10, at + 20);
-    fs.close();
-
-    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
-    status1 = fs.getFileStatus(path);
-    fs.close();
-    long atNew = status1.getAccessTime();
-    long mtNew = status1.getModificationTime();
-    Assert.assertEquals(mtNew, mt + 10);
-    Assert.assertEquals(atNew, at + 20);
+    if (!isLocalFS()) {
+      FileSystem fs = FileSystem.get(getProxiedFSConf());
+      Path path = new Path(getProxiedFSTestDir(), "foo.txt");
+      OutputStream os = fs.create(path);
+      os.write(1);
+      os.close();
+      FileStatus status1 = fs.getFileStatus(path);
+      fs.close();
+      long at = status1.getAccessTime();
+      long mt = status1.getModificationTime();
+
+      fs = getHttpFSFileSystem();
+      fs.setTimes(path, mt - 10, at - 20);
+      fs.close();
+
+      fs = FileSystem.get(getProxiedFSConf());
+      status1 = fs.getFileStatus(path);
+      fs.close();
+      long atNew = status1.getAccessTime();
+      long mtNew = status1.getModificationTime();
+      Assert.assertEquals(mtNew, mt - 10);
+      Assert.assertEquals(atNew, at - 20);
+    }
   }
 
   private void testSetPermission() throws Exception {
-    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
-    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foodir");
+    FileSystem fs = FileSystem.get(getProxiedFSConf());
+    Path path = new Path(getProxiedFSTestDir(), "foodir");
     fs.mkdirs(path);
 
-    fs = getHttpFileSystem();
+    fs = getHttpFSFileSystem();
     FsPermission permission1 = new FsPermission(FsAction.READ_WRITE, FsAction.NONE, FsAction.NONE);
     fs.setPermission(path, permission1);
     fs.close();
 
-    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    fs = FileSystem.get(getProxiedFSConf());
     FileStatus status1 = fs.getFileStatus(path);
     fs.close();
     FsPermission permission2 = status1.getPermission();
     Assert.assertEquals(permission2, permission1);
 
-    //sticky bit 
-    fs = getHttpFileSystem();
+    //sticky bit
+    fs = getHttpFSFileSystem();
     permission1 = new FsPermission(FsAction.READ_WRITE, FsAction.NONE, FsAction.NONE, true);
     fs.setPermission(path, permission1);
     fs.close();
 
-    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    fs = FileSystem.get(getProxiedFSConf());
     status1 = fs.getFileStatus(path);
     fs.close();
     permission2 = status1.getPermission();
@@ -348,70 +369,76 @@ public class TestHttpFSFileSystem extends HFSTestCase {
   }
 
   private void testSetOwner() throws Exception {
-    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
-    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
-    OutputStream os = fs.create(path);
-    os.write(1);
-    os.close();
-    fs.close();
-
-    fs = getHttpFileSystem();
-    String user = HadoopUsersConfTestHelper.getHadoopUsers()[1];
-    String group = HadoopUsersConfTestHelper.getHadoopUserGroups(user)[0];
-    fs.setOwner(path, user, group);
-    fs.close();
-
-    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
-    FileStatus status1 = fs.getFileStatus(path);
-    fs.close();
-    Assert.assertEquals(status1.getOwner(), user);
-    Assert.assertEquals(status1.getGroup(), group);
+    if (!isLocalFS()) {
+      FileSystem fs = FileSystem.get(getProxiedFSConf());
+      fs.mkdirs(getProxiedFSTestDir());
+      Path path = new Path(getProxiedFSTestDir(), "foo.txt");
+      OutputStream os = fs.create(path);
+      os.write(1);
+      os.close();
+      fs.close();
+
+      fs = getHttpFSFileSystem();
+      String user = HadoopUsersConfTestHelper.getHadoopUsers()[1];
+      String group = HadoopUsersConfTestHelper.getHadoopUserGroups(user)[0];
+      fs.setOwner(path, user, group);
+      fs.close();
+
+      fs = FileSystem.get(getProxiedFSConf());
+      FileStatus status1 = fs.getFileStatus(path);
+      fs.close();
+      Assert.assertEquals(status1.getOwner(), user);
+      Assert.assertEquals(status1.getGroup(), group);
+    }
   }
 
   private void testSetReplication() throws Exception {
-    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
-    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
+    FileSystem fs = FileSystem.get(getProxiedFSConf());
+    Path path = new Path(getProxiedFSTestDir(), "foo.txt");
     OutputStream os = fs.create(path);
     os.write(1);
     os.close();
     fs.close();
     fs.setReplication(path, (short) 2);
 
-    fs = getHttpFileSystem();
+    fs = getHttpFSFileSystem();
     fs.setReplication(path, (short) 1);
     fs.close();
 
-    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    fs = FileSystem.get(getProxiedFSConf());
     FileStatus status1 = fs.getFileStatus(path);
     fs.close();
     Assert.assertEquals(status1.getReplication(), (short) 1);
   }
 
   private void testChecksum() throws Exception {
-    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
-    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
-    OutputStream os = fs.create(path);
-    os.write(1);
-    os.close();
-    FileChecksum hdfsChecksum = fs.getFileChecksum(path);
-    fs.close();
-    fs = getHttpFileSystem();
-    FileChecksum httpChecksum = fs.getFileChecksum(path);
-    fs.close();
-    Assert.assertEquals(httpChecksum.getAlgorithmName(), hdfsChecksum.getAlgorithmName());
-    Assert.assertEquals(httpChecksum.getLength(), hdfsChecksum.getLength());
-    Assert.assertArrayEquals(httpChecksum.getBytes(), hdfsChecksum.getBytes());
+    if (!isLocalFS()) {
+      FileSystem fs = FileSystem.get(getProxiedFSConf());
+      fs.mkdirs(getProxiedFSTestDir());
+      Path path = new Path(getProxiedFSTestDir(), "foo.txt");
+      OutputStream os = fs.create(path);
+      os.write(1);
+      os.close();
+      FileChecksum hdfsChecksum = fs.getFileChecksum(path);
+      fs.close();
+      fs = getHttpFSFileSystem();
+      FileChecksum httpChecksum = fs.getFileChecksum(path);
+      fs.close();
+      Assert.assertEquals(httpChecksum.getAlgorithmName(), hdfsChecksum.getAlgorithmName());
+      Assert.assertEquals(httpChecksum.getLength(), hdfsChecksum.getLength());
+      Assert.assertArrayEquals(httpChecksum.getBytes(), hdfsChecksum.getBytes());
+    }
   }
 
   private void testContentSummary() throws Exception {
-    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
-    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
+    FileSystem fs = FileSystem.get(getProxiedFSConf());
+    Path path = new Path(getProxiedFSTestDir(), "foo.txt");
     OutputStream os = fs.create(path);
     os.write(1);
     os.close();
     ContentSummary hdfsContentSummary = fs.getContentSummary(path);
     fs.close();
-    fs = getHttpFileSystem();
+    fs = getHttpFSFileSystem();
     ContentSummary httpContentSummary = fs.getContentSummary(path);
     fs.close();
     Assert.assertEquals(httpContentSummary.getDirectoryCount(), hdfsContentSummary.getDirectoryCount());
@@ -484,13 +511,13 @@ public class TestHttpFSFileSystem extends HFSTestCase {
       ops[i] = new Object[]{Operation.values()[i]};
     }
     //To test one or a subset of operations do:
-    //return Arrays.asList(new Object[][]{ new Object[]{Operation.OPEN}});
+    //return Arrays.asList(new Object[][]{ new Object[]{Operation.APPEND}});
     return Arrays.asList(ops);
   }
 
   private Operation operation;
 
-  public TestHttpFSFileSystem(Operation operation) {
+  public BaseTestHttpFSWith(Operation operation) {
     this.operation = operation;
   }
 

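The three abstract hooks introduced above (getProxiedFSTestDir, getProxiedFSURI, getProxiedFSConf) are all that a concrete suite has to supply. As a rough, hypothetical sketch of wiring BaseTestHttpFSWith to yet another Hadoop filesystem (the class name, URI and test directory below are illustrative assumptions, not code from this commit):

    package org.apache.hadoop.fs.http.client;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
    import org.apache.hadoop.fs.Path;
    import org.junit.runner.RunWith;
    import org.junit.runners.Parameterized;

    @RunWith(value = Parameterized.class)
    public class TestHttpFSWithSomeOtherFileSystem extends BaseTestHttpFSWith {

      public TestHttpFSWithSomeOtherFileSystem(Operation operation) {
        super(operation);
      }

      protected Path getProxiedFSTestDir() {
        // Hypothetical: any directory the proxied filesystem can create.
        return new Path("/tmp/httpfs-test");
      }

      protected String getProxiedFSURI() {
        // Hypothetical URI of the non-HDFS filesystem HttpFS should front.
        return "someotherfs://localhost:9999/";
      }

      protected Configuration getProxiedFSConf() {
        // Point the default filesystem at the proxied URI, as the two
        // concrete suites in this commit do.
        Configuration conf = new Configuration(false);
        conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY,
                 getProxiedFSURI());
        return conf;
      }
    }

The two subclasses added below follow exactly this pattern, one backed by the local filesystem and one by the test HDFS configuration from TestHdfsHelper.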
+ 3 - 8
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestWebhdfsFileSystem.java → hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFWithWebhdfsFileSystem.java

@@ -18,20 +18,15 @@
 
 package org.apache.hadoop.fs.http.client;
 
-import java.net.URI;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
-import org.apache.hadoop.test.TestJettyHelper;
-import org.junit.Assert;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
 @RunWith(value = Parameterized.class)
-public class TestWebhdfsFileSystem extends TestHttpFSFileSystem {
+public class TestHttpFSFWithWebhdfsFileSystem
+  extends TestHttpFSWithHttpFSFileSystem {
 
-  public TestWebhdfsFileSystem(TestHttpFSFileSystem.Operation operation) {
+  public TestHttpFSFWithWebhdfsFileSystem(Operation operation) {
     super(operation);
   }
 

+ 82 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java

@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.http.client;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.test.TestDirHelper;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.File;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+@RunWith(value = Parameterized.class)
+public class TestHttpFSFileSystemLocalFileSystem extends BaseTestHttpFSWith {
+
+  private static String PATH_PREFIX;
+
+  static {
+    new TestDirHelper();
+    String prefix =
+      System.getProperty("test.build.dir", "target/test-dir") + "/local";
+    File file = new File(prefix);
+    file.mkdirs();
+    PATH_PREFIX = file.getAbsolutePath();
+  }
+
+  public TestHttpFSFileSystemLocalFileSystem(Operation operation) {
+    super(operation);
+  }
+
+  protected Path getProxiedFSTestDir() {
+    return addPrefix(new Path(TestDirHelper.getTestDir().getAbsolutePath()));
+  }
+
+  protected String getProxiedFSURI() {
+    return "file:///";
+  }
+
+  protected Configuration getProxiedFSConf() {
+    Configuration conf = new Configuration(false);
+    conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, getProxiedFSURI());
+    return conf;
+  }
+
+  protected Path addPrefix(Path path) {
+    URI uri = path.toUri();
+    try {
+      if (uri.getAuthority() != null) {
+        uri = new URI(uri.getScheme(),
+                      uri.getAuthority(), PATH_PREFIX + uri.getPath());
+      }
+      else {
+        if (uri.getPath().startsWith("/")) {
+          uri = new URI(PATH_PREFIX + uri.getPath());
+        }
+      }
+    } catch (URISyntaxException ex) {
+      throw new RuntimeException("It should not happen: " + ex.toString(), ex);
+    }
+    return new Path(uri);
+  }
+
+}

+ 52 - 0
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSWithHttpFSFileSystem.java

@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.http.client;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.test.TestHdfsHelper;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+@RunWith(value = Parameterized.class)
+public class TestHttpFSWithHttpFSFileSystem extends BaseTestHttpFSWith {
+
+  public TestHttpFSWithHttpFSFileSystem(Operation operation) {
+    super(operation);
+  }
+
+  protected Class getFileSystemClass() {
+    return HttpFSFileSystem.class;
+  }
+
+  protected Path getProxiedFSTestDir() {
+    return TestHdfsHelper.getHdfsTestDir();
+  }
+
+  protected String getProxiedFSURI() {
+    return TestHdfsHelper.getHdfsConf().get(
+      CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
+  }
+
+  protected Configuration getProxiedFSConf() {
+    return TestHdfsHelper.getHdfsConf();
+  }
+
+}

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -366,6 +366,9 @@ Branch-2 ( Unreleased changes )
 
     HDFS-3667.  Add retry support to WebHdfsFileSystem.  (szetszwo)
 
+    HDFS-3291. add test that covers HttpFS working w/ a non-HDFS Hadoop
+    filesystem (tucu)
+
   OPTIMIZATIONS
 
     HDFS-2982. Startup performance suffers when there are many edit log