
HADOOP-6318. Upgrade to Avro 1.2.0.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@829279 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting 15 years ago
parent commit a13a0f3fba

+ 1 - 1
.eclipse.templates/.classpath

@@ -5,7 +5,7 @@
 	<classpathentry kind="src" path="src/test/core"/>
 	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
 	<classpathentry kind="var" path="ANT_HOME/lib/ant.jar"/>
-	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/avro-1.1.0.jar"/>
+	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/avro-1.2.0.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/commons-cli-1.2.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/commons-codec-1.3.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/commons-el-1.0.jar"/>

+ 2 - 0
CHANGES.txt

@@ -30,6 +30,8 @@ Trunk (unreleased changes)
     HADOOP-6326. Hudson runs should check for AspectJ warnings and report
     failure if any is present (cos)
 
+    HADOOP-6318. Upgrade to Avro 1.2.0.  (cutting)
+
   OPTIMIZATIONS
 
   BUG FIXES

+ 1 - 1
ivy/libraries.properties

@@ -16,7 +16,7 @@
 #These are the versions of our dependencies (in alphabetical order)
 apacheant.version=1.7.0
 
-avro.version=1.1.0
+avro.version=1.2.0
 
 checkstyle.version=4.2
 

+ 1 - 8
src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java

@@ -78,14 +78,7 @@ public class AvroReflectSerialization extends AvroSerialization<Object>{
   @Override
   protected DatumReader getReader(Map<String, String> metadata) {
     try {
-      Class<SpecificRecord> clazz = (Class<SpecificRecord>)
-        getClassFromMetadata(metadata);
-      String prefix =  
-        ((clazz.getEnclosingClass() == null 
-            || "null".equals(clazz.getEnclosingClass().getName())) ? 
-              clazz.getPackage().getName() + "." 
-              : (clazz.getEnclosingClass().getName() + "$"));
-      return new ReflectDatumReader(ReflectData.get().getSchema(clazz), prefix);
+      return new ReflectDatumReader(getClassFromMetadata(metadata));
     } catch (Exception e) {
       throw new RuntimeException(e);
     }
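For context, a minimal sketch of the API this hunk now relies on, assuming the Avro 1.2.0 behavior shown above: ReflectDatumReader can be constructed directly from the record class and derives the schema (and enclosing-class name prefix) itself. ReflectReaderSketch and SampleRecord are illustrative names, not part of the patch.

import org.apache.avro.io.DatumReader;
import org.apache.avro.reflect.ReflectDatumReader;

public class ReflectReaderSketch {

  /** Illustrative reflect-serializable type; not part of this change. */
  public static class SampleRecord {
    public int id;
    public String name;
  }

  /** Mirrors the new getReader(): the class alone is enough, with no explicit
   *  schema lookup or enclosing-class prefix computation by the caller. */
  public static DatumReader newReader(Class<?> clazz) {
    return new ReflectDatumReader(clazz);
  }

  public static void main(String[] args) {
    System.out.println(newReader(SampleRecord.class).getClass().getName());
  }
}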

+ 1 - 3
src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java

@@ -48,9 +48,7 @@ public class AvroSpecificSerialization
   @Override
   protected DatumReader getReader(Map<String, String> metadata) {
     try {
-      Class<SpecificRecord> clazz = (Class<SpecificRecord>)
-        getClassFromMetadata(metadata);
-      return new SpecificDatumReader(clazz.newInstance().getSchema());
+      return new SpecificDatumReader(getClassFromMetadata(metadata));
     } catch (Exception e) {
       throw new RuntimeException(e);
     }
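The specific-record path is analogous; a brief sketch under the same assumption that SpecificDatumReader now accepts the generated class directly, so the newInstance().getSchema() round-trip removed above is no longer needed. readerForName and the Class.forName lookup stand in for getClassFromMetadata and are illustrative only.

import org.apache.avro.io.DatumReader;
import org.apache.avro.specific.SpecificDatumReader;

public class SpecificReaderSketch {

  /** Builds a reader from the class name carried in serialization metadata;
   *  Class.forName stands in for getClassFromMetadata. */
  public static DatumReader readerForName(String className) throws ClassNotFoundException {
    Class<?> clazz = Class.forName(className);
    // Avro 1.2.0: the reader takes the generated class itself, so the record
    // no longer has to be instantiated just to obtain its schema.
    return new SpecificDatumReader(clazz);
  }
}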

+ 3 - 33
src/java/org/apache/hadoop/ipc/AvroRpc.java

@@ -114,13 +114,6 @@ public class AvroRpc {
     public void close() throws IOException {}
   }
     
-  private static class Invoker extends ReflectRequestor {
-    public Invoker(Protocol protocol, Transceiver transceiver)
-      throws IOException {
-      super(protocol, transceiver);
-    }
-  }
-
   /** Construct a client-side proxy object that implements the named protocol,
    * talking to a server at the named address. */
   public static Object getProxy(Class<?> protocol,
@@ -150,36 +143,14 @@ public class AvroRpc {
        new InvocationHandler() {
          public Object invoke(Object proxy, Method method, Object[] args) 
            throws Throwable {
-           return new Invoker
-             (ReflectData.get().getProtocol(protocol),
+           return new ReflectRequestor
+             (protocol,
               new ClientTransceiver(addr, ticket, conf, factory))
              .invoke(proxy, method, args);
          }
        });
   }
 
-  /** An Avro RPC Transceiver that provides a request passed through Hadoop RPC
-   * to the Avro RPC Responder for processing. */
-  private static class ServerTransceiver extends Transceiver {
-    List<ByteBuffer> request;
-    
-    public ServerTransceiver(List<ByteBuffer> request) {
-      this.request = request;
-    }
-
-    public String getRemoteName() { return "remote"; }
-
-    public List<ByteBuffer> readBuffers() throws IOException {
-      return request;
-    }
-
-    public void writeBuffers(List<ByteBuffer> buffers) throws IOException {
-      throw new UnsupportedOperationException();
-    }
-
-    public void close() throws IOException {}
-  }
-
   /** An Avro RPC Responder that can process requests passed via Hadoop RPC. */
   private static class TunnelResponder extends ReflectResponder
     implements TunnelProtocol {
@@ -195,8 +166,7 @@ public class AvroRpc {
 
     public BufferListWritable call(final BufferListWritable request)
       throws IOException {
-      return new BufferListWritable
-        (respond(new ServerTransceiver(request.buffers)));
+      return new BufferListWritable(respond(request.buffers));
     }
   }
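A condensed client-side sketch of what this hunk depends on, inferred from the patch itself: ReflectRequestor now accepts the protocol interface class plus a Transceiver (making the deleted Invoker wrapper and its ReflectData.get().getProtocol(...) call unnecessary), while on the server side Responder.respond(...) accepts the serialized request buffers directly, which is why ServerTransceiver could be removed. EchoProtocol and the passed-in Transceiver are placeholders, and the org.apache.avro.reflect import path is an assumption about the 1.2-era package layout.

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.reflect.ReflectRequestor;

public class AvroRpcClientSketch {

  /** Placeholder protocol interface; not part of the patch. */
  public interface EchoProtocol {
    String echo(String message);
  }

  /** Client wiring in the style of AvroRpc.getProxy above: each call builds a
   *  ReflectRequestor straight from the interface class and delegates to it. */
  public static EchoProtocol getProxy(final Transceiver transceiver) {
    return (EchoProtocol) Proxy.newProxyInstance(
        EchoProtocol.class.getClassLoader(),
        new Class[] { EchoProtocol.class },
        new InvocationHandler() {
          public Object invoke(Object proxy, Method method, Object[] args)
              throws Throwable {
            return new ReflectRequestor(EchoProtocol.class, transceiver)
                .invoke(proxy, method, args);
          }
        });
  }
}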