@@ -19,6 +19,10 @@ package org.apache.hadoop.hdfs.nfs.nfs3;
 
 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map.Entry;
+import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 
@@ -29,7 +33,9 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSInputStream;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.io.MultipleIOException;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.ShutdownHookManager;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Objects;
@@ -94,7 +100,7 @@ class DFSClientCache {
   DFSClientCache(Configuration config) {
     this(config, DEFAULT_DFS_CLIENT_CACHE_SIZE);
   }
-
+
   DFSClientCache(Configuration config, int clientCache) {
     this.config = config;
     this.clientCache = CacheBuilder.newBuilder()
@@ -107,8 +113,52 @@ class DFSClientCache {
         .expireAfterAccess(DEFAULT_DFS_INPUTSTREAM_CACHE_TTL, TimeUnit.SECONDS)
         .removalListener(inputStreamRemovalListener())
         .build(inputStreamLoader());
+
+    ShutdownHookManager.get().addShutdownHook(new CacheFinalizer(),
+        SHUTDOWN_HOOK_PRIORITY);
+  }
+
+  /**
+   * Priority of the FileSystem shutdown hook.
+   */
+  public static final int SHUTDOWN_HOOK_PRIORITY = 10;
+
+  private class CacheFinalizer implements Runnable {
+    @Override
+    public synchronized void run() {
+      try {
+        closeAll(true);
+      } catch (IOException e) {
+        LOG.info("DFSClientCache.closeAll() threw an exception:\n", e);
+      }
+    }
   }
+
+  /**
+   * Close all DFSClient instances in the Cache.
+   * @param onlyAutomatic only close those that are marked for automatic closing
+   */
+  synchronized void closeAll(boolean onlyAutomatic) throws IOException {
+    List<IOException> exceptions = new ArrayList<IOException>();
 
+    ConcurrentMap<String, DFSClient> map = clientCache.asMap();
+
+    for (Entry<String, DFSClient> item : map.entrySet()) {
+      final DFSClient client = item.getValue();
+      if (client != null) {
+        try {
+          client.close();
+        } catch (IOException ioe) {
+          exceptions.add(ioe);
+        }
+      }
+    }
+
+    if (!exceptions.isEmpty()) {
+      throw MultipleIOException.createIOException(exceptions);
+    }
+  }
+
   private CacheLoader<String, DFSClient> clientLoader() {
     return new CacheLoader<String, DFSClient>() {
       @Override
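For reference, a minimal standalone sketch (not part of the patch) of the same pattern the hunk above introduces: register a Runnable with ShutdownHookManager so cached resources are closed at JVM shutdown, and fold any per-resource IOExceptions into one MultipleIOException. The ResourceCache class, its fields, and the put method are hypothetical; only ShutdownHookManager.get().addShutdownHook and MultipleIOException.createIOException mirror the Hadoop calls used in the patch.

import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.apache.hadoop.io.MultipleIOException;
import org.apache.hadoop.util.ShutdownHookManager;

// Hypothetical cache used only to illustrate the shutdown-hook pattern above.
class ResourceCache {
  static final int SHUTDOWN_HOOK_PRIORITY = 10;

  private final ConcurrentMap<String, Closeable> resources =
      new ConcurrentHashMap<String, Closeable>();

  ResourceCache() {
    // Same idea as the patch: close all cached entries when the JVM shuts down.
    ShutdownHookManager.get().addShutdownHook(new Runnable() {
      @Override
      public void run() {
        try {
          closeAll();
        } catch (IOException e) {
          // Nothing useful can be done this late; just note the failure.
          System.err.println("closeAll() failed: " + e);
        }
      }
    }, SHUTDOWN_HOOK_PRIORITY);
  }

  void put(String key, Closeable value) {
    resources.put(key, value);
  }

  // Close every cached resource, collecting failures so one bad entry
  // does not prevent the rest from being closed.
  synchronized void closeAll() throws IOException {
    List<IOException> exceptions = new ArrayList<IOException>();
    for (Closeable c : resources.values()) {
      try {
        c.close();
      } catch (IOException ioe) {
        exceptions.add(ioe);
      }
    }
    if (!exceptions.isEmpty()) {
      throw MultipleIOException.createIOException(exceptions);
    }
  }
}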