
MAPREDUCE-6605. Fix typos mapreduce.map.skip.proc.count.autoincr and mapreduce.reduce.skip.proc.count.autoincr in mapred-default.xml. Contributed by Kai Sasaki.

Akira Ajisaka 9 years ago
parent
commit
7f215ffdd5

+ 4 - 0
hadoop-mapreduce-project/CHANGES.txt

@@ -399,6 +399,10 @@ Release 2.8.0 - UNRELEASED
 
     MAPREDUCE-6601. Fix typo in Job#setUseNewAPI. (Kai Sasaki via aajisaka)
 
+    MAPREDUCE-6605. Fix typos mapreduce.map.skip.proc.count.autoincr and
+    mapreduce.reduce.skip.proc.count.autoincr in mapred-default.xml.
+    (Kai Sasaki via aajisaka)
+
 Release 2.7.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/TestMapreduceConfigFields.java

@@ -75,8 +75,8 @@ public class TestMapreduceConfigFields extends TestConfigurationFieldsBase {
     // Obsolete entries listed in MAPREDUCE-6057 were removed from trunk
     // but not removed from branch-2.
     xmlPropsToSkipCompare.add("map.sort.class");
-    xmlPropsToSkipCompare.add("mapreduce.reduce.skip.proc.count.autoincr");
-    xmlPropsToSkipCompare.add("mapreduce.map.skip.proc.count.autoincr");
+    xmlPropsToSkipCompare.add("mapreduce.reduce.skip.proc-count.auto-incr");
+    xmlPropsToSkipCompare.add("mapreduce.map.skip.proc-count.auto-incr");
     xmlPropsToSkipCompare.add("mapreduce.local.clientfactory.class.name");
   }
 

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml

@@ -884,7 +884,7 @@
   </property>
 
   <property>
-    <name>mapreduce.map.skip.proc.count.autoincr</name>
+    <name>mapreduce.map.skip.proc-count.auto-incr</name>
     <value>true</value>
     <description> The flag which if set to true, 
     SkipBadRecords.COUNTER_MAP_PROCESSED_RECORDS is incremented 
@@ -896,7 +896,7 @@
   </property>
   
   <property>
-    <name>mapreduce.reduce.skip.proc.count.autoincr</name>
+    <name>mapreduce.reduce.skip.proc-count.auto-incr</name>
     <value>true</value>
     <description> The flag which if set to true, 
     SkipBadRecords.COUNTER_REDUCE_PROCESSED_GROUPS is incremented 

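The descriptions in the mapred-default.xml hunk above refer to the SkipBadRecords counters that back these two properties. As a minimal sketch (not part of this patch, and with a hypothetical class name), the snippet below assumes the standard org.apache.hadoop.mapred.SkipBadRecords setters write the corrected property names into a job Configuration:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapred.SkipBadRecords;

    public class SkipCounterConfigExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Assumed: these setters store the corrected keys
        // mapreduce.map.skip.proc-count.auto-incr and
        // mapreduce.reduce.skip.proc-count.auto-incr,
        // disabling automatic increment of the processed-record counters
        // so a mapper/reducer can increment them manually.
        SkipBadRecords.setAutoIncrMapperProcCount(conf, false);
        SkipBadRecords.setAutoIncrReducerProcCount(conf, false);

        // Read back the values under the corrected (hyphenated) names.
        System.out.println(conf.get("mapreduce.map.skip.proc-count.auto-incr"));
        System.out.println(conf.get("mapreduce.reduce.skip.proc-count.auto-incr"));
      }
    }

Before this fix, mapred-default.xml documented the dotted names (mapreduce.map.skip.proc.count.autoincr), which no Hadoop code reads; only the hyphenated names shown above take effect.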
+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001_conf.xml

@@ -93,7 +93,7 @@
 <property><!--Loaded from job.xml--><name>mapreduce.reduce.input.buffer.percent</name><value>0.0</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.map.output.compress.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
 <property><!--Loaded from job.xml--><name>yarn.resourcemanager.delegation-token.keepalive-time-ms</name><value>300000</value></property>
-<property><!--Loaded from job.xml--><name>mapreduce.map.skip.proc.count.autoincr</name><value>true</value></property>
+<property><!--Loaded from job.xml--><name>mapreduce.map.skip.proc-count.auto-incr</name><value>true</value></property>
 <property><!--Loaded from job.xml--><name>dfs.datanode.directoryscan.threads</name><value>1</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.jobtracker.address</name><value>local</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.cluster.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
@@ -198,7 +198,7 @@
 <property><!--Loaded from job.xml--><name>dfs.block.access.key.update.interval</name><value>600</value></property>
 <property><!--Loaded from job.xml--><name>mapreduce.jobhistory.move.interval-ms</name><value>30000</value></property>
 <property><!--Loaded from job.xml--><name>dfs.datanode.dns.interface</name><value>default</value></property>
-<property><!--Loaded from job.xml--><name>mapreduce.reduce.skip.proc.count.autoincr</name><value>true</value></property>
+<property><!--Loaded from job.xml--><name>mapreduce.reduce.skip.proc-count.auto-incr</name><value>true</value></property>
 <property><!--Loaded from job.xml--><name>dfs.namenode.backup.http-address</name><value>0.0.0.0:50105</value></property>
 <property><!--Loaded from job.xml--><name>yarn.nodemanager.container-monitor.interval-ms</name><value>3000</value></property>
 <property><!--Loaded from job.xml--><name>mapred.reducer.new-api</name><value>true</value></property>

+ 4 - 4
hadoop-tools/hadoop-sls/src/main/data/2jobs2min-rumen-jh.json

@@ -4645,7 +4645,7 @@
     "dfs.ha.log-roll.period" : "120",
     "mapreduce.reduce.input.buffer.percent" : "0.0",
     "mapreduce.map.output.compress.codec" : "org.apache.hadoop.io.compress.SnappyCodec",
-    "mapreduce.map.skip.proc.count.autoincr" : "true",
+    "mapreduce.map.skip.proc-count.auto-incr" : "true",
     "dfs.client.failover.sleep.base.millis" : "500",
     "dfs.datanode.directoryscan.threads" : "1",
     "mapreduce.jobtracker.address" : "neededForHive:999999",
@@ -4765,7 +4765,7 @@
     "dfs.namenode.backup.address" : "0.0.0.0:50100",
     "hadoop.util.hash.type" : "murmur",
     "dfs.block.access.key.update.interval" : "600",
-    "mapreduce.reduce.skip.proc.count.autoincr" : "true",
+    "mapreduce.reduce.skip.proc-count.auto-incr" : "true",
     "dfs.datanode.dns.interface" : "default",
     "dfs.datanode.use.datanode.hostname" : "false",
     "mapreduce.job.output.key.class" : "org.apache.hadoop.io.Text",
@@ -9754,7 +9754,7 @@
     "dfs.ha.log-roll.period" : "120",
     "mapreduce.reduce.input.buffer.percent" : "0.0",
     "mapreduce.map.output.compress.codec" : "org.apache.hadoop.io.compress.SnappyCodec",
-    "mapreduce.map.skip.proc.count.autoincr" : "true",
+    "mapreduce.map.skip.proc-count.auto-incr" : "true",
     "dfs.client.failover.sleep.base.millis" : "500",
     "dfs.datanode.directoryscan.threads" : "1",
     "mapreduce.jobtracker.address" : "neededForHive:999999",
@@ -9874,7 +9874,7 @@
     "dfs.namenode.backup.address" : "0.0.0.0:50100",
     "hadoop.util.hash.type" : "murmur",
     "dfs.block.access.key.update.interval" : "600",
-    "mapreduce.reduce.skip.proc.count.autoincr" : "true",
+    "mapreduce.reduce.skip.proc-count.auto-incr" : "true",
     "dfs.datanode.dns.interface" : "default",
     "dfs.datanode.use.datanode.hostname" : "false",
     "mapreduce.job.output.key.class" : "org.apache.hadoop.io.Text",