[prev in list] [next in list] [prev in thread] [next in thread]
List: hadoop-commits
Subject: svn commit: r1332461 - in /hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common: ./
From: szetszwo () apache ! org
Date: 2012-04-30 23:01:12
Message-ID: 20120430230114.23C85238897D () eris ! apache ! org
[Download RAW message or body]
Author: szetszwo
Date: Mon Apr 30 23:01:07 2012
New Revision: 1332461
URL: http://svn.apache.org/viewvc?rev=1332461&view=rev
Log:
Merge r1329944 through r1332459 from trunk.
Added:
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
- copied unchanged from r1332459, \
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem
- copied unchanged from r1332459, \
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java
- copied unchanged from r1332459, \
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShutdownHookManager.java
- copied unchanged from r1332459, \
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShutdownHookManager.java
Modified:
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt \
(contents, props changed) \
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/docs/ \
(props changed) hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/ \
(props changed) hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KosmosFileSystem.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/core/ \
(props changed) hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt \
Mon Apr 30 23:01:07 2012 @@ -121,6 +121,9 @@ Trunk (unreleased changes)
HADOOP-7788. Add simple HealthMonitor class to watch an HAService (todd)
+ HADOOP-8312. testpatch.sh should provide a simpler way to see which
+ warnings changed (bobby)
+
OPTIMIZATIONS
HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -269,6 +272,8 @@ Release 2.0.0 - UNRELEASED
HADOOP-8152. Expand public APIs for security library classes. (atm via eli)
+ HADOOP-7549. Use JDK ServiceLoader mechanism to find FileSystem implementations. \
(tucu) +
OPTIMIZATIONS
BUG FIXES
@@ -373,6 +378,18 @@ Release 2.0.0 - UNRELEASED
HADOOP-8309. Pseudo & Kerberos AuthenticationHandler should use
getType() to create token (tucu)
+ HADOOP-8314. HttpServer#hasAdminAccess should return false if
+ authorization is enabled but user is not authenticated. (tucu)
+
+ HADOOP-8296. hadoop/yarn daemonlog usage wrong (Devaraj K via tgraves)
+
+ HADOOP-8310. FileContext#checkPath should handle URIs with no port. (atm)
+
+ HADOOP-8321. TestUrlStreamHandler fails. (tucu)
+
+ HADOOP-8325. Add a ShutdownHookManager to be used by different
+ components instead of the JVM shutdownhook (tucu)
+
BREAKDOWN OF HADOOP-7454 SUBTASKS
HADOOP-7455. HA: Introduce HA Service Protocol Interface. (suresh)
@@ -469,6 +486,17 @@ Release 0.23.3 - UNRELEASED
HADOOP-8227. Allow RPC to limit ephemeral port range. (bobby)
+ HADOOP-8305. distcp over viewfs is broken (John George via bobby)
+
+ HADOOP-8334. HttpServer sometimes returns incorrect port (Daryn Sharp via
+ bobby)
+
+ HADOOP-8330. Update TestSequenceFile.testCreateUsesFsArg() for HADOOP-8305.
+ (John George via szetszwo)
+
+ HADOOP-8335. Improve Configuration's address handling (Daryn Sharp via
+ bobby)
+
Release 0.23.2 - UNRELEASED
INCOMPATIBLE CHANGES
Propchange: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1329944-1332459
Propchange: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1329944-1332459
Propchange: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1329944-1332459
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Mon Apr 30 23:01:07 2012
@@ -1236,6 +1236,29 @@ public class Configuration implements It
     final String address = get(name, defaultAddress);
return NetUtils.createSocketAddr(address, defaultPort, name);
}
+
+ /**
+ * Set the socket address for the <code>name</code> property as
+ * a <code>host:port</code>.
+ */
+ public void setSocketAddr(String name, InetSocketAddress addr) {
+ set(name, NetUtils.getHostPortString(addr));
+ }
+
+ /**
+ * Set the socket address a client can use to connect for the
+ * <code>name</code> property as a <code>host:port</code>. The wildcard
+ * address is replaced with the local host's address.
+ * @param name property name.
+ * @param addr InetSocketAddress of a listener to store in the given property
+ * @return InetSocketAddress for clients to connect
+ */
+ public InetSocketAddress updateConnectAddr(String name,
+ InetSocketAddress addr) {
+ final InetSocketAddress connectAddr = NetUtils.getConnectAddress(addr);
+ setSocketAddr(name, connectAddr);
+ return connectAddr;
+ }
/**
* Load a class by name.
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java Mon Apr 30 23:01:07 2012
@@ -350,20 +350,23 @@ public abstract class AbstractFileSystem
       }
     }
String thisScheme = this.getUri().getScheme();
- String thisAuthority = this.getUri().getAuthority();
+ String thisHost = this.getUri().getHost();
+ String thatHost = uri.getHost();
- // Schemes and authorities must match.
+ // Schemes and hosts must match.
// Allow for null Authority for file:///
if (!thisScheme.equalsIgnoreCase(thatScheme) ||
- (thisAuthority != null &&
- !thisAuthority.equalsIgnoreCase(thatAuthority)) ||
- (thisAuthority == null && thatAuthority != null)) {
+ (thisHost != null &&
+ !thisHost.equalsIgnoreCase(thatHost)) ||
+ (thisHost == null && thatHost != null)) {
throw new InvalidPathException("Wrong FS: " + path + ", expected: "
+ this.getUri());
}
+ // Ports must match, unless this FS instance is using the default port, in
+ // which case the port may be omitted from the given URI
int thisPort = this.getUri().getPort();
- int thatPort = path.toUri().getPort();
+ int thatPort = uri.getPort();
if (thatPort == -1) { // -1 => defaultPort of Uri scheme
thatPort = this.getUriDefaultPort();
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java \
Mon Apr 30 23:01:07 2012 @@ -54,6 +54,7 @@ import org.apache.hadoop.fs.InvalidPathE
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.util.ShutdownHookManager;
/**
* The FileContext class provides an interface to the application writer for
@@ -171,7 +172,12 @@ public final class FileContext {
public static final Log LOG = LogFactory.getLog(FileContext.class);
public static final FsPermission DEFAULT_PERM = FsPermission.getDefault();
-
+
+ /**
+ * Priority of the FileContext shutdown hook.
+ */
+ public static final int SHUTDOWN_HOOK_PRIORITY = 20;
+
/**
* List of files that should be deleted on JVM shutdown.
*/
@@ -1456,8 +1462,8 @@ public final class FileContext {
return false;
}
synchronized (DELETE_ON_EXIT) {
- if (DELETE_ON_EXIT.isEmpty() && !FINALIZER.isAlive()) {
- Runtime.getRuntime().addShutdownHook(FINALIZER);
+ if (DELETE_ON_EXIT.isEmpty()) {
+ ShutdownHookManager.get().addShutdownHook(FINALIZER, \
SHUTDOWN_HOOK_PRIORITY); }
Set<Path> set = DELETE_ON_EXIT.get(this);
@@ -2215,7 +2221,7 @@ public final class FileContext {
/**
* Deletes all the paths in deleteOnExit on JVM shutdown.
*/
- static class FileContextFinalizer extends Thread {
+ static class FileContextFinalizer implements Runnable {
public synchronized void run() {
processDeleteOnExit();
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java \
Mon Apr 30 23:01:07 2012 @@ -32,6 +32,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
+import java.util.ServiceLoader;
import java.util.Set;
import java.util.Stack;
import java.util.TreeSet;
@@ -54,6 +55,7 @@ import org.apache.hadoop.security.UserGr
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.ShutdownHookManager;
/****************************************************************
* An abstract base class for a fairly generic filesystem. It
@@ -83,6 +85,11 @@ public abstract class FileSystem extends
public static final Log LOG = LogFactory.getLog(FileSystem.class);
+ /**
+ * Priority of the FileSystem shutdown hook.
+ */
+ public static final int SHUTDOWN_HOOK_PRIORITY = 10;
+
/** FileSystem cache */
static final Cache CACHE = new Cache();
@@ -184,6 +191,17 @@ public abstract class FileSystem extends
statistics = getStatistics(name.getScheme(), getClass());
}
+ /**
+ * Return the protocol scheme for the FileSystem.
+ * <p/>
+ * This implementation throws an <code>UnsupportedOperationException</code>.
+ *
+ * @return the protocol scheme for the FileSystem.
+ */
+ public String getScheme() {
+ throw new UnsupportedOperationException("Not implemented by the FileSystem \
implementation"); + }
+
/** Returns a URI whose scheme and authority identify this FileSystem.*/
public abstract URI getUri();
@@ -2078,9 +2096,45 @@ public abstract class FileSystem extends
) throws IOException {
}
+ // making it volatile to be able to do a double checked locking
+ private volatile static boolean FILE_SYSTEMS_LOADED = false;
+
+ private static final Map<String, Class<? extends FileSystem>>
+ SERVICE_FILE_SYSTEMS = new HashMap<String, Class<? extends FileSystem>>();
+
+ private static void loadFileSystems() {
+ synchronized (FileSystem.class) {
+ if (!FILE_SYSTEMS_LOADED) {
+ ServiceLoader<FileSystem> serviceLoader = \
ServiceLoader.load(FileSystem.class); + for (FileSystem fs : serviceLoader) {
+ SERVICE_FILE_SYSTEMS.put(fs.getScheme(), fs.getClass());
+ }
+ FILE_SYSTEMS_LOADED = true;
+ }
+ }
+ }
+
+ public static Class<? extends FileSystem> getFileSystemClass(String scheme,
+ Configuration conf) throws IOException {
+ if (!FILE_SYSTEMS_LOADED) {
+ loadFileSystems();
+ }
+ Class<? extends FileSystem> clazz = null;
+ if (conf != null) {
+ clazz = (Class<? extends FileSystem>) conf.getClass("fs." + scheme + ".impl", \
null); + }
+ if (clazz == null) {
+ clazz = SERVICE_FILE_SYSTEMS.get(scheme);
+ }
+ if (clazz == null) {
+ throw new IOException("No FileSystem for scheme: " + scheme);
+ }
+ return clazz;
+ }
+
private static FileSystem createFileSystem(URI uri, Configuration conf
) throws IOException {
- Class<?> clazz = conf.getClass("fs." + uri.getScheme() + ".impl", null);
+ Class<?> clazz = getFileSystemClass(uri.getScheme(), conf);
if (clazz == null) {
throw new IOException("No FileSystem for scheme: " + uri.getScheme());
}
@@ -2128,8 +2182,8 @@ public abstract class FileSystem extends
}
// now insert the new file system into the map
- if (map.isEmpty() && !clientFinalizer.isAlive()) {
- Runtime.getRuntime().addShutdownHook(clientFinalizer);
+ if (map.isEmpty() ) {
+ ShutdownHookManager.get().addShutdownHook(clientFinalizer, \
SHUTDOWN_HOOK_PRIORITY); }
fs.key = key;
map.put(key, fs);
@@ -2144,11 +2198,8 @@ public abstract class FileSystem extends
if (map.containsKey(key) && fs == map.get(key)) {
map.remove(key);
toAutoClose.remove(key);
- if (map.isEmpty() && !clientFinalizer.isAlive()) {
- if (!Runtime.getRuntime().removeShutdownHook(clientFinalizer)) {
- LOG.info("Could not cancel cleanup thread, though no " +
- "FileSystems are open");
- }
+ if (map.isEmpty()) {
+ ShutdownHookManager.get().removeShutdownHook(clientFinalizer);
}
}
}
@@ -2194,7 +2245,7 @@ public abstract class FileSystem extends
}
}
- private class ClientFinalizer extends Thread {
+ private class ClientFinalizer implements Runnable {
public synchronized void run() {
try {
closeAll(true);
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java \
Mon Apr 30 23:01:07 2012 @@ -17,6 +17,7 @@
*/
package org.apache.hadoop.fs;
+import java.io.IOException;
import java.net.URLStreamHandlerFactory;
import java.util.HashMap;
import java.util.Map;
@@ -50,25 +51,23 @@ public class FsUrlStreamHandlerFactory i
private java.net.URLStreamHandler handler;
public FsUrlStreamHandlerFactory() {
- this.conf = new Configuration();
- // force the resolution of the configuration files
- // this is required if we want the factory to be able to handle
- // file:// URLs
- this.conf.getClass("fs.file.impl", null);
- this.handler = new FsUrlStreamHandler(this.conf);
+ this(new Configuration());
}
public FsUrlStreamHandlerFactory(Configuration conf) {
this.conf = new Configuration(conf);
- // force the resolution of the configuration files
- this.conf.getClass("fs.file.impl", null);
this.handler = new FsUrlStreamHandler(this.conf);
}
public java.net.URLStreamHandler createURLStreamHandler(String protocol) {
if (!protocols.containsKey(protocol)) {
- boolean known =
- (conf.getClass("fs." + protocol + ".impl", null) != null);
+ boolean known = true;
+ try {
+ FileSystem.getFileSystemClass(protocol, conf);
+ }
+ catch (IOException ex) {
+ known = false;
+ }
protocols.put(protocol, known);
}
if (protocols.get(protocol)) {
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java \
Mon Apr 30 23:01:07 2012 @@ -71,7 +71,18 @@ public class HarFileSystem extends Filte
*/
public HarFileSystem() {
}
-
+
+ /**
+ * Return the protocol scheme for the FileSystem.
+ * <p/>
+ *
+ * @return <code>har</code>
+ */
+ @Override
+ public String getScheme() {
+ return "har";
+ }
+
/**
* Constructor to create a HarFileSystem with an
* underlying filesystem.
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java \
Mon Apr 30 23:01:07 2012 @@ -39,7 +39,18 @@ public class LocalFileSystem extends Che
public LocalFileSystem() {
this(new RawLocalFileSystem());
}
-
+
+ /**
+ * Return the protocol scheme for the FileSystem.
+ * <p/>
+ *
+ * @return <code>file</code>
+ */
+ @Override
+ public String getScheme() {
+ return "file";
+ }
+
public FileSystem getRaw() {
return getRawFileSystem();
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java \
Mon Apr 30 23:01:07 2012 @@ -59,6 +59,17 @@ public class FTPFileSystem extends FileS
private URI uri;
+ /**
+ * Return the protocol scheme for the FileSystem.
+ * <p/>
+ *
+ * @return <code>ftp</code>
+ */
+ @Override
+ public String getScheme() {
+ return "ftp";
+ }
+
@Override
public void initialize(URI uri, Configuration conf) throws IOException { // get
super.initialize(uri, conf);
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KosmosFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KosmosFileSystem.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KosmosFileSystem.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KosmosFileSystem.java \
Mon Apr 30 23:01:07 2012 @@ -57,6 +57,17 @@ public class KosmosFileSystem extends Fi
this.kfsImpl = fsimpl;
}
+ /**
+ * Return the protocol scheme for the FileSystem.
+ * <p/>
+ *
+ * @return <code>kfs</code>
+ */
+ @Override
+ public String getScheme() {
+ return "kfs";
+ }
+
@Override
public URI getUri() {
return uri;
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java \
Mon Apr 30 23:01:07 2012 @@ -67,6 +67,17 @@ public class S3FileSystem extends FileSy
this.store = store;
}
+ /**
+ * Return the protocol scheme for the FileSystem.
+ * <p/>
+ *
+ * @return <code>s3</code>
+ */
+ @Override
+ public String getScheme() {
+ return "s3";
+ }
+
@Override
public URI getUri() {
return uri;
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java \
Mon Apr 30 23:01:07 2012 @@ -251,7 +251,18 @@ public class NativeS3FileSystem extends \
public NativeS3FileSystem(NativeFileSystemStore store) {
this.store = store;
}
-
+
+ /**
+ * Return the protocol scheme for the FileSystem.
+ * <p/>
+ *
+ * @return <code>s3n</code>
+ */
+ @Override
+ public String getScheme() {
+ return "s3n";
+ }
+
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
super.initialize(uri, conf);
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java Mon Apr 30 23:01:07 2012
@@ -150,6 +150,17 @@ public class ViewFileSystem extends File
   }
/**
+ * Return the protocol scheme for the FileSystem.
+ * <p/>
+ *
+ * @return <code>viewfs</code>
+ */
+ @Override
+ public String getScheme() {
+ return "viewfs";
+ }
+
+ /**
* Called after a new FileSystem instance is constructed.
* @param theUri a uri whose authority section names the host, port, etc. for
* this FileSystem
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java Mon Apr 30 23:01:07 2012
@@ -636,80 +636,16 @@ public class HttpServer implements Filte
*/
public void start() throws IOException {
try {
- if(listenerStartedExternally) { // Expect that listener was started securely
- if(listener.getLocalPort() == -1) // ... and verify
- throw new Exception("Exepected webserver's listener to be started " +
- "previously but wasn't");
- // And skip all the port rolling issues.
+ try {
+ openListener();
+ LOG.info("Jetty bound to port " + listener.getLocalPort());
webServer.start();
- } else {
- int port = 0;
- int oriPort = listener.getPort(); // The original requested port
- while (true) {
- try {
- port = webServer.getConnectors()[0].getLocalPort();
- LOG.debug("Port returned by webServer.getConnectors()[0]." +
- "getLocalPort() before open() is "+ port +
- ". Opening the listener on " + oriPort);
- listener.open();
- port = listener.getLocalPort();
- LOG.debug("listener.getLocalPort() returned " + listener.getLocalPort() \
+
- " webServer.getConnectors()[0].getLocalPort() returned " +
- webServer.getConnectors()[0].getLocalPort());
- //Workaround to handle the problem reported in HADOOP-4744
- if (port < 0) {
- Thread.sleep(100);
- int numRetries = 1;
- while (port < 0) {
- LOG.warn("listener.getLocalPort returned " + port);
- if (numRetries++ > MAX_RETRIES) {
- throw new Exception(" listener.getLocalPort is returning " +
- "less than 0 even after " +numRetries+" resets");
- }
- for (int i = 0; i < 2; i++) {
- LOG.info("Retrying listener.getLocalPort()");
- port = listener.getLocalPort();
- if (port > 0) {
- break;
- }
- Thread.sleep(200);
- }
- if (port > 0) {
- break;
- }
- LOG.info("Bouncing the listener");
- listener.close();
- Thread.sleep(1000);
- listener.setPort(oriPort == 0 ? 0 : (oriPort += 1));
- listener.open();
- Thread.sleep(100);
- port = listener.getLocalPort();
- }
- } //Workaround end
- LOG.info("Jetty bound to port " + port);
- webServer.start();
- break;
- } catch (IOException ex) {
- // if this is a bind exception,
- // then try the next port number.
- if (ex instanceof BindException) {
- if (!findPort) {
- BindException be = new BindException(
- "Port in use: " + listener.getHost()
- + ":" + listener.getPort());
- be.initCause(ex);
- throw be;
- }
- } else {
- LOG.info("HttpServer.start() threw a non Bind IOException");
- throw ex;
- }
- } catch (MultiException ex) {
- LOG.info("HttpServer.start() threw a MultiException");
- throw ex;
- }
- listener.setPort((oriPort += 1));
- }
+ } catch (IOException ex) {
+ LOG.info("HttpServer.start() threw a non Bind IOException", ex);
+ throw ex;
+ } catch (MultiException ex) {
+ LOG.info("HttpServer.start() threw a MultiException", ex);
+ throw ex;
}
// Make sure there is no handler failures.
Handler[] handlers = webServer.getHandlers();
@@ -730,6 +666,52 @@ public class HttpServer implements Filte
}
/**
+ * Open the main listener for the server
+ * @throws Exception
+ */
+ void openListener() throws Exception {
+ if (listener.getLocalPort() != -1) { // it's already bound
+ return;
+ }
+ if (listenerStartedExternally) { // Expect that listener was started securely
+ throw new Exception("Expected webserver's listener to be started " +
+ "previously but wasn't");
+ }
+ int port = listener.getPort();
+ while (true) {
+ // jetty has a bug where you can't reopen a listener that previously
+ // failed to open w/o issuing a close first, even if the port is changed
+ try {
+ listener.close();
+ listener.open();
+ break;
+ } catch (BindException ex) {
+ if (port == 0 || !findPort) {
+ BindException be = new BindException(
+ "Port in use: " + listener.getHost() + ":" + listener.getPort());
+ be.initCause(ex);
+ throw be;
+ }
+ }
+ // try the next port number
+ listener.setPort(++port);
+ Thread.sleep(100);
+ }
+ }
+
+ /**
+ * Return the bind address of the listener.
+ * @return InetSocketAddress of the listener
+ */
+ public InetSocketAddress getListenerAddress() {
+ int port = listener.getLocalPort();
+ if (port == -1) { // not bound, return requested port
+ port = listener.getPort();
+ }
+ return new InetSocketAddress(listener.getHost(), port);
+ }
+
+ /**
* stop the server
*/
public void stop() throws Exception {
@@ -821,7 +803,10 @@ public class HttpServer implements Filte
String remoteUser = request.getRemoteUser();
if (remoteUser == null) {
- return true;
+ response.sendError(HttpServletResponse.SC_UNAUTHORIZED,
+ "Unauthenticated users are not " +
+ "authorized to access this page.");
+ return false;
}
AccessControlList adminsAcl = (AccessControlList) servletContext
.getAttribute(ADMINS_ACL);
@@ -830,9 +815,7 @@ public class HttpServer implements Filte
if (adminsAcl != null) {
if (!adminsAcl.isUserAllowed(remoteUserUGI)) {
response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User "
- + remoteUser + " is unauthorized to access this page. "
- + "AccessControlList for accessing this page : "
- + adminsAcl.toString());
+ + remoteUser + " is unauthorized to access this page.");
return false;
}
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java \
Mon Apr 30 23:01:07 2012 @@ -1050,9 +1050,9 @@ public class SequenceFile {
int bufferSize = bufferSizeOption == null ? getBufferSize(conf) :
bufferSizeOption.getValue();
short replication = replicationOption == null ?
- fs.getDefaultReplication() :
+ fs.getDefaultReplication(p) :
(short) replicationOption.getValue();
- long blockSize = blockSizeOption == null ? fs.getDefaultBlockSize() :
+ long blockSize = blockSizeOption == null ? fs.getDefaultBlockSize(p) :
blockSizeOption.getValue();
Progressable progress = progressOption == null ? null :
progressOption.getValue();
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java \
Mon Apr 30 23:01:07 2012 @@ -36,11 +36,9 @@ import org.apache.hadoop.util.ServletUti
*/
@InterfaceStability.Evolving
public class LogLevel {
- public static final String USAGES = "\nUSAGES:\n"
- + "java " + LogLevel.class.getName()
- + " -getlevel <host:port> <name>\n"
- + "java " + LogLevel.class.getName()
- + " -setlevel <host:port> <name> <level>\n";
+ public static final String USAGES = "\nUsage: General options are:\n"
+ + "\t[-getlevel <host:httpPort> <name>]\n"
+ + "\t[-setlevel <host:httpPort> <name> <level>]\n";
/**
* A command line implementation
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java \
Mon Apr 30 23:01:07 2012 @@ -351,8 +351,19 @@ public class NetUtils {
* @return socket address that a client can use to connect to the server.
*/
public static InetSocketAddress getConnectAddress(Server server) {
- InetSocketAddress addr = server.getListenerAddress();
- if (addr.getAddress().isAnyLocalAddress()) {
+ return getConnectAddress(server.getListenerAddress());
+ }
+
+ /**
+ * Returns the InetSocketAddress that a client can use to connect to the
+ * given listening address. This returns "hostname:port" of the server,
+ * or "127.0.0.1:port" when given a wildcard address of "0.0.0.0:port".
+ *
+ * @param addr of a listener
+ * @return socket address that a client can use to connect to the server.
+ */
+ public static InetSocketAddress getConnectAddress(InetSocketAddress addr) {
+ if (!addr.isUnresolved() && addr.getAddress().isAnyLocalAddress()) {
try {
addr = new InetSocketAddress(InetAddress.getLocalHost(), addr.getPort());
} catch (UnknownHostException uhe) {
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java \
Mon Apr 30 23:01:07 2012 @@ -51,6 +51,11 @@ public class RunJar {
public static final Pattern MATCH_ANY = Pattern.compile(".*");
/**
+ * Priority of the RunJar shutdown hook.
+ */
+ public static final int SHUTDOWN_HOOK_PRIORITY = 10;
+
+ /**
* Unpack a jar file into a directory.
*
* This version unpacks all files inside the jar regardless of filename.
@@ -167,11 +172,14 @@ public class RunJar {
}
ensureDirectory(workDir);
- Runtime.getRuntime().addShutdownHook(new Thread() {
+ ShutdownHookManager.get().addShutdownHook(
+ new Runnable() {
+ @Override
public void run() {
FileUtil.fullyDelete(workDir);
}
- });
+ }, SHUTDOWN_HOOK_PRIORITY);
+
unJar(file, workDir);
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java \
Mon Apr 30 23:01:07 2012 @@ -46,6 +46,11 @@ import org.apache.hadoop.net.NetUtils;
@InterfaceStability.Unstable
public class StringUtils {
+ /**
+ * Priority of the StringUtils shutdown hook.
+ */
+ public static final int SHUTDOWN_HOOK_PRIORITY = 0;
+
private static final DecimalFormat decimalFormat;
static {
NumberFormat numberFormat = \
NumberFormat.getNumberInstance(Locale.ENGLISH); @@ -600,12 +605,15 @@ public class \
StringUtils { )
);
- Runtime.getRuntime().addShutdownHook(new Thread() {
- public void run() {
- LOG.info(toStartupShutdownString("SHUTDOWN_MSG: ", new String[]{
- "Shutting down " + classname + " at " + hostname}));
- }
- });
+ ShutdownHookManager.get().addShutdownHook(
+ new Runnable() {
+ @Override
+ public void run() {
+ LOG.info(toStartupShutdownString("SHUTDOWN_MSG: ", new String[]{
+ "Shutting down " + classname + " at " + hostname}));
+ }
+ }, SHUTDOWN_HOOK_PRIORITY);
+
}
/**
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/main/resources/core-default.xml?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml \
Mon Apr 30 23:01:07 2012 @@ -353,25 +353,6 @@
</property>
<property>
- <name>fs.file.impl</name>
- <value>org.apache.hadoop.fs.LocalFileSystem</value>
- <description>The FileSystem for file: uris.</description>
-</property>
-
-<property>
- <name>fs.hdfs.impl</name>
- <value>org.apache.hadoop.hdfs.DistributedFileSystem</value>
- <description>The FileSystem for hdfs: uris.</description>
-</property>
-
-<property>
- <name>fs.viewfs.impl</name>
- <value>org.apache.hadoop.fs.viewfs.ViewFileSystem</value>
- <description>The FileSystem for view file system for viewfs: uris
- (ie client side mount table:).</description>
-</property>
-
-<property>
<name>fs.AbstractFileSystem.file.impl</name>
<value>org.apache.hadoop.fs.local.LocalFs</value>
<description>The AbstractFileSystem for file: uris.</description>
@@ -392,45 +373,6 @@
</property>
<property>
- <name>fs.s3.impl</name>
- <value>org.apache.hadoop.fs.s3.S3FileSystem</value>
- <description>The FileSystem for s3: uris.</description>
-</property>
-
-<property>
- <name>fs.s3n.impl</name>
- <value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value>
- <description>The FileSystem for s3n: (Native S3) uris.</description>
-</property>
-
-<property>
- <name>fs.kfs.impl</name>
- <value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value>
- <description>The FileSystem for kfs: uris.</description>
-</property>
-
-<property>
- <name>fs.hftp.impl</name>
- <value>org.apache.hadoop.hdfs.HftpFileSystem</value>
-</property>
-
-<property>
- <name>fs.hsftp.impl</name>
- <value>org.apache.hadoop.hdfs.HsftpFileSystem</value>
-</property>
-
-<property>
- <name>fs.webhdfs.impl</name>
- <value>org.apache.hadoop.hdfs.web.WebHdfsFileSystem</value>
-</property>
-
-<property>
- <name>fs.ftp.impl</name>
- <value>org.apache.hadoop.fs.ftp.FTPFileSystem</value>
- <description>The FileSystem for ftp: uris.</description>
-</property>
-
-<property>
<name>fs.ftp.host</name>
<value>0.0.0.0</value>
<description>FTP filesystem connects to this server</description>
@@ -445,18 +387,6 @@
</property>
<property>
- <name>fs.har.impl</name>
- <value>org.apache.hadoop.fs.HarFileSystem</value>
- <description>The filesystem for Hadoop archives. </description>
-</property>
-
-<property>
- <name>fs.har.impl.disable.cache</name>
- <value>true</value>
- <description>Don't cache 'har' filesystem instances.</description>
-</property>
-
-<property>
<name>fs.df.interval</name>
<value>60000</value>
<description>Disk usage statistics refresh interval in msec.</description>
Propchange: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1329944-1332459
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java \
Mon Apr 30 23:01:07 2012 @@ -23,6 +23,7 @@ import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.StringWriter;
+import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Arrays;
@@ -671,6 +672,27 @@ public class TestConfiguration extends T
}
}
+ public void testSetSocketAddress() throws IOException {
+ Configuration conf = new Configuration();
+ NetUtils.addStaticResolution("host", "127.0.0.1");
+ final String defaultAddr = "host:1";
+
+ InetSocketAddress addr = NetUtils.createSocketAddr(defaultAddr);
+ conf.setSocketAddr("myAddress", addr);
+ assertEquals(defaultAddr, NetUtils.getHostPortString(addr));
+ }
+
+ public void testUpdateSocketAddress() throws IOException {
+ InetSocketAddress addr = NetUtils.createSocketAddrForHost("host", 1);
+ InetSocketAddress connectAddr = conf.updateConnectAddr("myAddress", addr);
+ assertEquals(connectAddr.getHostName(), addr.getHostName());
+
+ addr = new InetSocketAddress(1);
+ connectAddr = conf.updateConnectAddr("myAddress", addr);
+ assertEquals(connectAddr.getHostName(),
+ InetAddress.getLocalHost().getHostName());
+ }
+
public void testReload() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java \
Mon Apr 30 23:01:07 2012 @@ -21,6 +21,7 @@ import java.io.IOException;
import java.util.Set;
import junit.framework.Assert;
+import org.apache.hadoop.util.ShutdownHookManager;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -65,7 +66,7 @@ public class TestFileContextDeleteOnExit
checkDeleteOnExitData(1, fc, file1);
// Ensure shutdown hook is added
- Assert.assertTrue(Runtime.getRuntime().removeShutdownHook(FileContext.FINALIZER));
+ Assert.assertTrue(ShutdownHookManager.get().hasShutdownHook(FileContext.FINALIZER));
Path file2 = getTestRootPath(fc, "dir1/file2");
createFile(fc, file2, numBlocks, blockSize);
@@ -79,8 +80,7 @@ public class TestFileContextDeleteOnExit
// trigger deleteOnExit and ensure the registered
// paths are cleaned up
- FileContext.FINALIZER.start();
- FileContext.FINALIZER.join();
+ FileContext.FINALIZER.run();
checkDeleteOnExitData(0, fc, new Path[0]);
Assert.assertFalse(exists(fc, file1));
Assert.assertFalse(exists(fc, file2));
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java \
Mon Apr 30 23:01:07 2012 @@ -43,7 +43,7 @@ public class TestFileSystemCaching {
@Test
public void testCacheEnabled() throws Exception {
Configuration conf = new Configuration();
- conf.set("fs.cachedfile.impl", conf.get("fs.file.impl"));
+ conf.set("fs.cachedfile.impl", FileSystem.getFileSystemClass("file", \
null).getName()); FileSystem fs1 = FileSystem.get(new URI("cachedfile://a"), conf);
FileSystem fs2 = FileSystem.get(new URI("cachedfile://a"), conf);
assertSame(fs1, fs2);
@@ -84,7 +84,7 @@ public class TestFileSystemCaching {
// wait for InitializeForeverFileSystem to start initialization
InitializeForeverFileSystem.sem.acquire();
- conf.set("fs.cachedfile.impl", conf.get("fs.file.impl"));
+ conf.set("fs.cachedfile.impl", FileSystem.getFileSystemClass("file", \
null).getName()); FileSystem.get(new URI("cachedfile://a"), conf);
t.interrupt();
t.join();
@@ -93,7 +93,7 @@ public class TestFileSystemCaching {
@Test
public void testCacheDisabled() throws Exception {
Configuration conf = new Configuration();
- conf.set("fs.uncachedfile.impl", conf.get("fs.file.impl"));
+ conf.set("fs.uncachedfile.impl", FileSystem.getFileSystemClass("file", \
null).getName()); conf.setBoolean("fs.uncachedfile.impl.disable.cache", true);
FileSystem fs1 = FileSystem.get(new URI("uncachedfile://a"), conf);
FileSystem fs2 = FileSystem.get(new URI("uncachedfile://a"), conf);
@@ -104,7 +104,7 @@ public class TestFileSystemCaching {
@Test
public <T extends TokenIdentifier> void testCacheForUgi() throws Exception {
final Configuration conf = new Configuration();
- conf.set("fs.cachedfile.impl", conf.get("fs.file.impl"));
+ conf.set("fs.cachedfile.impl", FileSystem.getFileSystemClass("file", \
null).getName());
UserGroupInformation ugiA = UserGroupInformation.createRemoteUser("foo");
UserGroupInformation ugiB = UserGroupInformation.createRemoteUser("bar");
FileSystem fsA = ugiA.doAs(new PrivilegedExceptionAction<FileSystem>() {
@@ -156,7 +156,7 @@ public class TestFileSystemCaching {
@Test
public void testUserFS() throws Exception {
final Configuration conf = new Configuration();
- conf.set("fs.cachedfile.impl", conf.get("fs.file.impl"));
+ conf.set("fs.cachedfile.impl", FileSystem.getFileSystemClass("file", \
null).getName());
FileSystem fsU1 = FileSystem.get(new URI("cachedfile://a"), conf, "bar");
FileSystem fsU2 = FileSystem.get(new URI("cachedfile://a"), conf, "foo");
@@ -166,7 +166,7 @@ public class TestFileSystemCaching {
@Test
public void testFsUniqueness() throws Exception {
final Configuration conf = new Configuration();
- conf.set("fs.cachedfile.impl", conf.get("fs.file.impl"));
+ conf.set("fs.cachedfile.impl", FileSystem.getFileSystemClass("file", \
null).getName()); // multiple invocations of FileSystem.get return the same object.
FileSystem fs1 = FileSystem.get(conf);
FileSystem fs2 = FileSystem.get(conf);
@@ -183,7 +183,7 @@ public class TestFileSystemCaching {
@Test
public void testCloseAllForUGI() throws Exception {
final Configuration conf = new Configuration();
- conf.set("fs.cachedfile.impl", conf.get("fs.file.impl"));
+ conf.set("fs.cachedfile.impl", FileSystem.getFileSystemClass("file", \
null).getName());
UserGroupInformation ugiA = UserGroupInformation.createRemoteUser("foo");
FileSystem fsA = ugiA.doAs(new PrivilegedExceptionAction<FileSystem>() {
public FileSystem run() throws Exception {
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java \
Mon Apr 30 23:01:07 2012 @@ -165,7 +165,10 @@ public class TestFilterFileSystem {
public Token<?> getDelegationToken(String renewer) throws IOException {
return null;
}
-
+
+ public String getScheme() {
+ return "dontcheck";
+ }
}
@Test
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java \
Mon Apr 30 23:01:07 2012 @@ -100,6 +100,19 @@ public class HttpServerFunctionalTest \
ex }
/**
+ * Create an HttpServer instance on the given address for the given webapp
+ * @param host to bind
+ * @param port to bind
+ * @return the server
+ * @throws IOException if it could not be created
+ */
+ public static HttpServer createServer(String host, int port)
+ throws IOException {
+ prepareTestWebapp();
+ return new HttpServer(TEST, host, port, true);
+ }
+
+ /**
* Create an HttpServer instance for the given webapp
* @param webapp the webapp to work with
* @return the server
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java \
Mon Apr 30 23:01:07 2012 @@ -20,6 +20,7 @@ package org.apache.hadoop.http;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.HttpURLConnection;
+import java.net.InetSocketAddress;
import java.net.URL;
import java.util.Arrays;
import java.util.Enumeration;
@@ -35,6 +36,7 @@ import java.util.concurrent.Executors;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
@@ -53,10 +55,12 @@ import org.apache.hadoop.http.HttpServer
import org.apache.hadoop.http.resource.JerseyResource;
import org.apache.hadoop.security.Groups;
import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.mockito.Mock;
import org.mockito.Mockito;
import org.mortbay.util.ajax.JSON;
@@ -422,4 +426,96 @@ public class TestHttpServer extends Http
assertEquals("bar", m.get(JerseyResource.OP));
LOG.info("END testJersey()");
}
+
+ @Test
+ public void testHasAdministratorAccess() throws Exception {
+ Configuration conf = new Configuration();
+ conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false);
+ ServletContext context = Mockito.mock(ServletContext.class);
+ Mockito.when(context.getAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE)).thenReturn(conf);
+ Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(null);
+ HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+ Mockito.when(request.getRemoteUser()).thenReturn(null);
+ HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+
+ //authorization OFF
+ Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, \
response)); +
+ //authorization ON & user NULL
+ response = Mockito.mock(HttpServletResponse.class);
+ conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
+ Assert.assertFalse(HttpServer.hasAdministratorAccess(context, request, \
response)); + Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), \
Mockito.anyString()); +
+ //authorization ON & user NOT NULL & ACLs NULL
+ response = Mockito.mock(HttpServletResponse.class);
+ Mockito.when(request.getRemoteUser()).thenReturn("foo");
+ Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, \
response)); +
+ //authorization ON & user NOT NULL & ACLs NOT NULL & user not in ACLs
+ response = Mockito.mock(HttpServletResponse.class);
+ AccessControlList acls = Mockito.mock(AccessControlList.class);
+ Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(false);
+ Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
+ Assert.assertFalse(HttpServer.hasAdministratorAccess(context, request, \
response)); + Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), \
Mockito.anyString()); +
+ //authorization ON & user NOT NULL & ACLs NOT NULL & user in ACLs
+ response = Mockito.mock(HttpServletResponse.class);
+ Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(true);
+ Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
+ Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, \
response)); +
+ }
+
+ @Test public void testBindAddress() throws Exception {
+ checkBindAddress("0.0.0.0", 0, false).stop();
+ // hang onto this one for a bit more testing
+ HttpServer myServer = checkBindAddress("localhost", 0, false);
+ HttpServer myServer2 = null;
+ try {
+ int port = myServer.getListenerAddress().getPort();
+ // it's already in use, true = expect a higher port
+ myServer2 = checkBindAddress("localhost", port, true);
+ // try to reuse the port
+ port = myServer2.getListenerAddress().getPort();
+ myServer2.stop();
+ assertEquals(-1, myServer2.getPort()); // not bound
+ myServer2.openListener();
+ assertEquals(port, myServer2.getPort()); // expect same port
+ } finally {
+ myServer.stop();
+ if (myServer2 != null) {
+ myServer2.stop();
+ }
+ }
+ }
+
+ private HttpServer checkBindAddress(String host, int port, boolean findPort)
+ throws Exception {
+ HttpServer server = createServer(host, port);
+ try {
+ // not bound, ephemeral should return requested port (0 for ephemeral)
+ InetSocketAddress addr = server.getListenerAddress();
+ assertEquals(port, addr.getPort());
+ // verify hostname is what was given
+ server.openListener();
+ addr = server.getListenerAddress();
+ assertEquals(host, addr.getHostName());
+
+ int boundPort = addr.getPort();
+ if (port == 0) {
+ assertTrue(boundPort != 0); // ephemeral should now return bound port
+ } else if (findPort) {
+ assertTrue(boundPort > port);
+ // allow a little wiggle room to prevent random test failures if
+ // some consecutive ports are already in use
+ assertTrue(addr.getPort() - port < 8);
+ }
+ } catch (Exception e) {
+ server.stop();
+ throw e;
+ }
+ return server;
+ }
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java \
Mon Apr 30 23:01:07 2012 @@ -470,7 +470,7 @@ public class TestSequenceFile extends Te
SequenceFile.Writer writer = SequenceFile.createWriter(
spyFs, conf, p, NullWritable.class, NullWritable.class);
writer.close();
- Mockito.verify(spyFs).getDefaultReplication();
+ Mockito.verify(spyFs).getDefaultReplication(p);
}
private static class TestFSDataInputStream extends FSDataInputStream {
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-proje \
ct/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java?rev=1332461&r1=1332460&r2=1332461&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java \
(original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java \
Mon Apr 30 23:01:07 2012 @@ -170,6 +170,19 @@ public class TestNetUtils {
}
@Test
+ public void testGetConnectAddress() throws IOException {
+ NetUtils.addStaticResolution("host", "127.0.0.1");
+ InetSocketAddress addr = NetUtils.createSocketAddrForHost("host", 1);
+ InetSocketAddress connectAddr = NetUtils.getConnectAddress(addr);
+ assertEquals(addr.getHostName(), connectAddr.getHostName());
+
+ addr = new InetSocketAddress(1);
+ connectAddr = NetUtils.getConnectAddress(addr);
+ assertEquals(InetAddress.getLocalHost().getHostName(),
+ connectAddr.getHostName());
+ }
+
+ @Test
public void testCreateSocketAddress() throws Throwable {
InetSocketAddress addr = NetUtils.createSocketAddr(
"127.0.0.1:12345", 1000, "myconfig");
[prev in list] [next in list] [prev in thread] [next in thread]
Configure |
About |
News |
Add a list |
Sponsored by KoreLogic