hadoop-mapreduce-commits mailing list archives

From: szets...@apache.org
Subject: svn commit: r1231834 [3/7] - in /hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project: ./ bin/ conf/ hadoop-mapreduce-client/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/ja...
Date: Mon, 16 Jan 2012 04:24:34 GMT
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/SingleCounterBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/SingleCounterBlock.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/SingleCounterBlock.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/SingleCounterBlock.java Mon Jan 16 04:24:24 2012
@@ -18,13 +18,18 @@
 
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
-import com.google.inject.Inject;
+import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.COUNTER_GROUP;
+import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.COUNTER_NAME;
+import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.JOB_ID;
+import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_ID;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._INFO_WRAP;
+
 import java.util.Map;
 import java.util.TreeMap;
 
-import org.apache.hadoop.mapreduce.v2.api.records.Counter;
-import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
-import org.apache.hadoop.mapreduce.v2.api.records.Counters;
+import org.apache.hadoop.mapreduce.Counter;
+import org.apache.hadoop.mapreduce.CounterGroup;
+import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
@@ -40,8 +45,7 @@ import org.apache.hadoop.yarn.webapp.ham
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TR;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
 
-import static org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp.*;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.*;
+import com.google.inject.Inject;
 
 public class SingleCounterBlock extends HtmlBlock {
   protected TreeMap<String, Long> values = new TreeMap<String, Long>(); 
@@ -122,10 +126,10 @@ public class SingleCounterBlock extends 
         task.getAttempts().entrySet()) {
         long value = 0;
         Counters counters = entry.getValue().getCounters();
-        CounterGroup group = (counters != null)
-        		? counters.getCounterGroup($(COUNTER_GROUP)) : null;
+        CounterGroup group = (counters != null) ? counters
+          .getGroup($(COUNTER_GROUP)) : null;
         if(group != null)  {
-          Counter c = group.getCounter($(COUNTER_NAME));
+          Counter c = group.findCounter($(COUNTER_NAME));
           if(c != null) {
             value = c.getValue();
           }
@@ -140,9 +144,9 @@ public class SingleCounterBlock extends 
     for(Map.Entry<TaskId, Task> entry : tasks.entrySet()) {
       long value = 0;
       CounterGroup group = entry.getValue().getCounters()
-      .getCounterGroup($(COUNTER_GROUP));
+        .getGroup($(COUNTER_GROUP));
       if(group != null)  {
-        Counter c = group.getCounter($(COUNTER_NAME));
+        Counter c = group.findCounter($(COUNTER_NAME));
         if(c != null) {
           value = c.getValue();
         }

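For context, the hunks above move SingleCounterBlock from the MR v2 record API (getCounterGroup/getCounter) to the org.apache.hadoop.mapreduce API (getGroup/findCounter). A minimal sketch of the new lookup pattern, using placeholder group and counter names rather than the page parameters the block reads:

import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Counters;

public class CounterLookupSketch {
  // Sketch only: look up a single counter value, defaulting to 0 when the
  // group or counter is absent, as the modified block does.
  static long counterValue(Counters counters, String groupName, String counterName) {
    long value = 0;
    CounterGroup group = (counters != null) ? counters.getGroup(groupName) : null;
    if (group != null) {
      Counter c = group.findCounter(counterName);
      if (c != null) {
        value = c.getValue();
      }
    }
    return value;
  }
}
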
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AppInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AppInfo.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AppInfo.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AppInfo.java Mon Jan 16 04:24:24 2012
@@ -32,7 +32,6 @@ public class AppInfo {
   protected String appId;
   protected String name;
   protected String user;
-  protected String hostname;
   protected long startedOn;
   protected long elapsedTime;
 
@@ -44,7 +43,7 @@ public class AppInfo {
     this.name = context.getApplicationName().toString();
     this.user = context.getUser().toString();
     this.startedOn = context.getStartTime();
-    this.elapsedTime = Times.elapsed(context.getStartTime(), 0);
+    this.elapsedTime = Times.elapsed(this.startedOn, 0);
   }
 
   public String getId() {

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfInfo.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfInfo.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfInfo.java Mon Jan 16 04:24:24 2012
@@ -30,7 +30,7 @@ import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 
-@XmlRootElement
+@XmlRootElement(name = "conf")
 @XmlAccessorType(XmlAccessType.FIELD)
 public class ConfInfo {
 

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterGroupInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterGroupInfo.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterGroupInfo.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterGroupInfo.java Mon Jan 16 04:24:24 2012
@@ -24,8 +24,8 @@ import javax.xml.bind.annotation.XmlAcce
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
 
-import org.apache.hadoop.mapreduce.v2.api.records.Counter;
-import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
+import org.apache.hadoop.mapreduce.Counter;
+import org.apache.hadoop.mapreduce.CounterGroup;
 
 @XmlRootElement(name = "counterGroup")
 @XmlAccessorType(XmlAccessType.FIELD)
@@ -38,14 +38,14 @@ public class CounterGroupInfo {
   public CounterGroupInfo() {
   }
 
-  public CounterGroupInfo(String name, CounterGroup g, CounterGroup mg,
+  public CounterGroupInfo(String name, CounterGroup group, CounterGroup mg,
       CounterGroup rg) {
     this.counterGroupName = name;
     this.counter = new ArrayList<CounterInfo>();
 
-    for (Counter c : g.getAllCounters().values()) {
-      Counter mc = mg == null ? null : mg.getCounter(c.getName());
-      Counter rc = rg == null ? null : rg.getCounter(c.getName());
+    for (Counter c : group) {
+      Counter mc = mg == null ? null : mg.findCounter(c.getName());
+      Counter rc = rg == null ? null : rg.findCounter(c.getName());
       CounterInfo cinfo = new CounterInfo(c, mc, rc);
       this.counter.add(cinfo);
     }

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterInfo.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterInfo.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterInfo.java Mon Jan 16 04:24:24 2012
@@ -21,13 +21,13 @@ import javax.xml.bind.annotation.XmlAcce
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlRootElement;
 
-import org.apache.hadoop.mapreduce.v2.api.records.Counter;
+import org.apache.hadoop.mapreduce.Counter;
 
 @XmlRootElement
 @XmlAccessorType(XmlAccessType.FIELD)
 public class CounterInfo {
 
-  protected String counterName;
+  protected String name;
   protected long totalCounterValue;
   protected long mapCounterValue;
   protected long reduceCounterValue;
@@ -35,9 +35,9 @@ public class CounterInfo {
   public CounterInfo() {
   }
 
-  public CounterInfo(Counter counter, Counter mc, Counter rc) {
-    this.counterName = counter.getName();
-    this.totalCounterValue = counter.getValue();
+  public CounterInfo(Counter c, Counter mc, Counter rc) {
+    this.name = c.getName();
+    this.totalCounterValue = c.getValue();
     this.mapCounterValue = mc == null ? 0 : mc.getValue();
     this.reduceCounterValue = rc == null ? 0 : rc.getValue();
   }

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java Mon Jan 16 04:24:24 2012
@@ -25,13 +25,12 @@ import javax.xml.bind.annotation.XmlAcce
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.XmlTransient;
 
-import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
-import org.apache.hadoop.mapreduce.v2.api.records.Counters;
+import org.apache.hadoop.mapreduce.CounterGroup;
+import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
-import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 
 @XmlRootElement(name = "jobCounters")
@@ -46,52 +45,49 @@ public class JobCounterInfo {
   protected Counters reduce = null;
 
   protected String id;
-  protected ArrayList<CounterGroupInfo> counterGroups;
+  protected ArrayList<CounterGroupInfo> counterGroup;
 
   public JobCounterInfo() {
   }
 
   public JobCounterInfo(AppContext ctx, Job job) {
     getCounters(ctx, job);
-    counterGroups = new ArrayList<CounterGroupInfo>();
+    counterGroup = new ArrayList<CounterGroupInfo>();
     this.id = MRApps.toString(job.getID());
 
-    int numGroups = 0;
-
     if (total != null) {
-      for (CounterGroup g : total.getAllCounterGroups().values()) {
+      for (CounterGroup g : total) {
         if (g != null) {
-          CounterGroup mg = map == null ? null : map.getCounterGroup(g
-              .getName());
-          CounterGroup rg = reduce == null ? null : reduce.getCounterGroup(g
-              .getName());
-          ++numGroups;
-
-          CounterGroupInfo cginfo = new CounterGroupInfo(g.getName(), g, mg, rg);
-          counterGroups.add(cginfo);
+          CounterGroup mg = map == null ? null : map.getGroup(g.getName());
+          CounterGroup rg = reduce == null ? null : reduce
+            .getGroup(g.getName());
+
+          CounterGroupInfo cginfo = new CounterGroupInfo(g.getName(), g,
+            mg, rg);
+          counterGroup.add(cginfo);
         }
       }
     }
   }
 
   private void getCounters(AppContext ctx, Job job) {
-    total = JobImpl.newCounters();
+    total = new Counters();
     if (job == null) {
       return;
     }
-    map = JobImpl.newCounters();
-    reduce = JobImpl.newCounters();
+    map = new Counters();
+    reduce = new Counters();
     // Get all types of counters
     Map<TaskId, Task> tasks = job.getTasks();
     for (Task t : tasks.values()) {
       Counters counters = t.getCounters();
-      JobImpl.incrAllCounters(total, counters);
+      total.incrAllCounters(counters);
       switch (t.getType()) {
       case MAP:
-        JobImpl.incrAllCounters(map, counters);
+        map.incrAllCounters(counters);
         break;
       case REDUCE:
-        JobImpl.incrAllCounters(reduce, counters);
+        reduce.incrAllCounters(counters);
         break;
       }
     }

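As the JobCounterInfo hunks show, counter aggregation now goes through Counters itself rather than static helpers on JobImpl. A rough sketch of that aggregation pattern, under the assumption that Task.getCounters() returns non-null here as in the modified code (the real getCounters(ctx, job) also keeps separate map and reduce totals by task type):

import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;

class JobCounterAggregationSketch {
  // Sketch: fold each task's counters into one running total via
  // Counters.incrAllCounters, as the modified method does.
  static Counters totalCounters(Job job) {
    Counters total = new Counters();
    if (job == null) {
      return total;
    }
    for (Task t : job.getTasks().values()) {
      total.incrAllCounters(t.getCounters());
    }
    return total;
  }
}
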
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java Mon Jan 16 04:24:24 2012
@@ -30,6 +30,7 @@ import javax.xml.bind.annotation.XmlTran
 
 import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
@@ -51,12 +52,12 @@ public class JobInfo {
   protected String id;
   protected String name;
   protected String user;
-  protected String state;
+  protected JobState state;
   protected int mapsTotal;
   protected int mapsCompleted;
-  protected float mapProgress;
   protected int reducesTotal;
   protected int reducesCompleted;
+  protected float mapProgress;
   protected float reduceProgress;
 
   @XmlTransient
@@ -83,18 +84,12 @@ public class JobInfo {
   protected int successfulMapAttempts = 0;
   protected ArrayList<ConfEntryInfo> acls;
 
-  @XmlTransient
-  protected int numMaps;
-  @XmlTransient
-  protected int numReduces;
-
   public JobInfo() {
   }
 
   public JobInfo(Job job, Boolean hasAccess) {
     this.id = MRApps.toString(job.getID());
     JobReport report = job.getReport();
-    countTasksAndAttempts(job);
     this.startTime = report.getStartTime();
     this.finishTime = report.getFinishTime();
     this.elapsedTime = Times.elapsed(this.startTime, this.finishTime);
@@ -103,7 +98,7 @@ public class JobInfo {
     }
     this.name = job.getName().toString();
     this.user = job.getUserName();
-    this.state = job.getState().toString();
+    this.state = job.getState();
     this.mapsTotal = job.getTotalMaps();
     this.mapsCompleted = job.getCompletedMaps();
     this.mapProgress = report.getMapProgress() * 100;
@@ -115,6 +110,9 @@ public class JobInfo {
 
     this.acls = new ArrayList<ConfEntryInfo>();
     if (hasAccess) {
+      this.diagnostics = "";
+      countTasksAndAttempts(job);
+
       this.uberized = job.isUber();
 
       List<String> diagnostics = job.getDiagnostics();
@@ -213,10 +211,10 @@ public class JobInfo {
   }
 
   public String getState() {
-    return this.state;
+    return this.state.toString();
   }
 
-  public String getUser() {
+  public String getUserName() {
     return this.user;
   }
 
@@ -267,13 +265,11 @@ public class JobInfo {
   /**
    * Go through a job and update the member variables with counts for
    * information to output in the page.
-   * 
+   *
    * @param job
    *          the job to get counts for.
    */
   private void countTasksAndAttempts(Job job) {
-    numReduces = 0;
-    numMaps = 0;
     final Map<TaskId, Task> tasks = job.getTasks();
     if (tasks == null) {
       return;

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptCounterInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptCounterInfo.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptCounterInfo.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptCounterInfo.java Mon Jan 16 04:24:24 2012
@@ -25,8 +25,8 @@ import javax.xml.bind.annotation.XmlAcce
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.XmlTransient;
 
-import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
-import org.apache.hadoop.mapreduce.v2.api.records.Counters;
+import org.apache.hadoop.mapreduce.CounterGroup;
+import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 
@@ -38,23 +38,22 @@ public class JobTaskAttemptCounterInfo {
   protected Counters total = null;
 
   protected String id;
-  protected ArrayList<TaskCounterGroupInfo> taskCounterGroups;
+  protected ArrayList<TaskCounterGroupInfo> taskAttemptCounterGroup;
 
   public JobTaskAttemptCounterInfo() {
   }
 
   public JobTaskAttemptCounterInfo(TaskAttempt taskattempt) {
 
-    long value = 0;
     this.id = MRApps.toString(taskattempt.getID());
     total = taskattempt.getCounters();
-    taskCounterGroups = new ArrayList<TaskCounterGroupInfo>();
+    taskAttemptCounterGroup = new ArrayList<TaskCounterGroupInfo>();
     if (total != null) {
-      for (CounterGroup g : total.getAllCounterGroups().values()) {
+      for (CounterGroup g : total) {
         if (g != null) {
           TaskCounterGroupInfo cginfo = new TaskCounterGroupInfo(g.getName(), g);
           if (cginfo != null) {
-            taskCounterGroups.add(cginfo);
+            taskAttemptCounterGroup.add(cginfo);
           }
         }
       }

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskCounterInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskCounterInfo.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskCounterInfo.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskCounterInfo.java Mon Jan 16 04:24:24 2012
@@ -25,8 +25,8 @@ import javax.xml.bind.annotation.XmlAcce
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.XmlTransient;
 
-import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
-import org.apache.hadoop.mapreduce.v2.api.records.Counters;
+import org.apache.hadoop.mapreduce.CounterGroup;
+import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 
@@ -38,7 +38,7 @@ public class JobTaskCounterInfo {
   protected Counters total = null;
 
   protected String id;
-  protected ArrayList<TaskCounterGroupInfo> taskCounterGroups;
+  protected ArrayList<TaskCounterGroupInfo> taskCounterGroup;
 
   public JobTaskCounterInfo() {
   }
@@ -46,12 +46,12 @@ public class JobTaskCounterInfo {
   public JobTaskCounterInfo(Task task) {
     total = task.getCounters();
     this.id = MRApps.toString(task.getID());
-    taskCounterGroups = new ArrayList<TaskCounterGroupInfo>();
+    taskCounterGroup = new ArrayList<TaskCounterGroupInfo>();
     if (total != null) {
-      for (CounterGroup g : total.getAllCounterGroups().values()) {
+      for (CounterGroup g : total) {
         if (g != null) {
           TaskCounterGroupInfo cginfo = new TaskCounterGroupInfo(g.getName(), g);
-          taskCounterGroups.add(cginfo);
+          taskCounterGroup.add(cginfo);
         }
       }
     }

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java Mon Jan 16 04:24:24 2012
@@ -25,6 +25,7 @@ import javax.xml.bind.annotation.XmlRoot
 import javax.xml.bind.annotation.XmlSeeAlso;
 import javax.xml.bind.annotation.XmlTransient;
 
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
@@ -43,7 +44,7 @@ public class TaskAttemptInfo {
   protected float progress;
   protected String id;
   protected String rack;
-  protected String state;
+  protected TaskAttemptState state;
   protected String nodeHttpAddress;
   protected String diagnostics;
   protected String type;
@@ -69,7 +70,7 @@ public class TaskAttemptInfo {
         .getAssignedContainerID());
     this.assignedContainer = ta.getAssignedContainerID();
     this.progress = ta.getProgress() * 100;
-    this.state = ta.getState().toString();
+    this.state = ta.getState();
     this.elapsedTime = Times
         .elapsed(this.startTime, this.finishTime, isRunning);
     if (this.elapsedTime == -1) {
@@ -95,7 +96,7 @@ public class TaskAttemptInfo {
   }
 
   public String getState() {
-    return this.state;
+    return this.state.toString();
   }
 
   public String getId() {

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptsInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptsInfo.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptsInfo.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptsInfo.java Mon Jan 16 04:24:24 2012
@@ -23,21 +23,21 @@ import javax.xml.bind.annotation.XmlAcce
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlRootElement;
 
-@XmlRootElement(name = "taskattempts")
+@XmlRootElement(name = "taskAttempts")
 @XmlAccessorType(XmlAccessType.FIELD)
 public class TaskAttemptsInfo {
 
-  protected ArrayList<TaskAttemptInfo> taskattempt = new ArrayList<TaskAttemptInfo>();
+  protected ArrayList<TaskAttemptInfo> taskAttempt = new ArrayList<TaskAttemptInfo>();
 
   public TaskAttemptsInfo() {
   } // JAXB needs this
 
   public void add(TaskAttemptInfo taskattemptInfo) {
-    taskattempt.add(taskattemptInfo);
+    taskAttempt.add(taskattemptInfo);
   }
 
   public ArrayList<TaskAttemptInfo> getTaskAttempts() {
-    return taskattempt;
+    return taskAttempt;
   }
 
 }

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskCounterGroupInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskCounterGroupInfo.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskCounterGroupInfo.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskCounterGroupInfo.java Mon Jan 16 04:24:24 2012
@@ -24,8 +24,8 @@ import javax.xml.bind.annotation.XmlAcce
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlRootElement;
 
-import org.apache.hadoop.mapreduce.v2.api.records.Counter;
-import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
+import org.apache.hadoop.mapreduce.Counter;
+import org.apache.hadoop.mapreduce.CounterGroup;
 
 @XmlRootElement
 @XmlAccessorType(XmlAccessType.FIELD)
@@ -37,11 +37,11 @@ public class TaskCounterGroupInfo {
   public TaskCounterGroupInfo() {
   }
 
-  public TaskCounterGroupInfo(String name, CounterGroup g) {
+  public TaskCounterGroupInfo(String name, CounterGroup group) {
     this.counterGroupName = name;
     this.counter = new ArrayList<TaskCounterInfo>();
 
-    for (Counter c : g.getAllCounters().values()) {
+    for (Counter c : group) {
       TaskCounterInfo cinfo = new TaskCounterInfo(c.getName(), c.getValue());
       this.counter.add(cinfo);
     }

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java Mon Jan 16 04:24:24 2012
@@ -24,6 +24,7 @@ import javax.xml.bind.annotation.XmlTran
 
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -39,7 +40,7 @@ public class TaskInfo {
   protected long elapsedTime;
   protected float progress;
   protected String id;
-  protected String state;
+  protected TaskState state;
   protected String type;
   protected String successfulAttempt;
 
@@ -62,7 +63,7 @@ public class TaskInfo {
     if (this.elapsedTime == -1) {
       this.elapsedTime = 0;
     }
-    this.state = report.getTaskState().toString();
+    this.state = report.getTaskState();
     this.progress = report.getProgress() * 100;
     this.id = MRApps.toString(task.getID());
     this.taskNum = task.getID().getId();
@@ -79,7 +80,7 @@ public class TaskInfo {
   }
 
   public String getState() {
-    return this.state;
+    return this.state.toString();
   }
 
   public String getId() {

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java Mon Jan 16 04:24:24 2012
@@ -19,6 +19,7 @@ package org.apache.hadoop.mapred;
 
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
@@ -43,7 +44,7 @@ public class TestTaskAttemptListenerImpl
     }
     
     @Override
-    protected void registerHeartbeatHandler() {
+    protected void registerHeartbeatHandler(Configuration conf) {
       //Empty
     }
 
@@ -79,21 +80,21 @@ public class TestTaskAttemptListenerImpl
     assertNotNull(result);
     assertTrue(result.shouldDie);
 
-    // Verify ask after registration but before launch
+    // Verify ask after registration but before launch. 
+    // Don't kill, should be null.
     TaskAttemptId attemptID = mock(TaskAttemptId.class);
     Task task = mock(Task.class);
     //Now put a task with the ID
     listener.registerPendingTask(task, wid);
     result = listener.getTask(context);
-    assertNotNull(result);
-    assertFalse(result.shouldDie);
+    assertNull(result);
     // Unregister for more testing.
     listener.unregister(attemptID, wid);
 
     // Verify ask after registration and launch
     //Now put a task with the ID
     listener.registerPendingTask(task, wid);
-    listener.registerLaunchedTask(attemptID);
+    listener.registerLaunchedTask(attemptID, wid);
     verify(hbHandler).register(attemptID);
     result = listener.getTask(context);
     assertNotNull(result);

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java Mon Jan 16 04:24:24 2012
@@ -324,7 +324,9 @@ public class MRApp extends MRAppMaster {
         return NetUtils.createSocketAddr("localhost:54321");
       }
       @Override
-      public void registerLaunchedTask(TaskAttemptId attemptID) {}
+      public void registerLaunchedTask(TaskAttemptId attemptID,
+          WrappedJvmID jvmID) {
+      }
       @Override
       public void unregister(TaskAttemptId attemptID, WrappedJvmID jvmID) {
       }
@@ -463,6 +465,7 @@ public class MRApp extends MRAppMaster {
       return localStateMachine;
     }
 
+    @SuppressWarnings("rawtypes")
     public TestJob(JobId jobId, ApplicationAttemptId applicationAttemptId,
         Configuration conf, EventHandler eventHandler,
         TaskAttemptListener taskAttemptListener, Clock clock,

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java Mon Jan 16 04:24:24 2012
@@ -1,41 +1,43 @@
 /**
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
 package org.apache.hadoop.mapreduce.v2.app;
 
-import com.google.common.collect.Iterators;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobACLsManager;
 import org.apache.hadoop.mapred.ShuffleHandler;
+import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.FileSystemCounter;
 import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.JobCounter;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.TaskCounter;
+import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
-import org.apache.hadoop.mapreduce.v2.api.records.Counters;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
@@ -48,7 +50,6 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
-import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -63,33 +64,38 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.util.Records;
 
+import com.google.common.collect.Iterators;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
 public class MockJobs extends MockApps {
-  static final Iterator<JobState> JOB_STATES = Iterators.cycle(
-      JobState.values());
-  static final Iterator<TaskState> TASK_STATES = Iterators.cycle(
-      TaskState.values());
-  static final Iterator<TaskAttemptState> TASK_ATTEMPT_STATES = Iterators.cycle(
-      TaskAttemptState.values());
-  static final Iterator<TaskType> TASK_TYPES = Iterators.cycle(
-      TaskType.values());
-  static final Iterator<JobCounter> JOB_COUNTERS = Iterators.cycle(
-      JobCounter.values());
-  static final Iterator<FileSystemCounter> FS_COUNTERS = Iterators.cycle(
-      FileSystemCounter.values());
-  static final Iterator<TaskCounter> TASK_COUNTERS = Iterators.cycle(
-      TaskCounter.values());
+  static final Iterator<JobState> JOB_STATES = Iterators.cycle(JobState
+      .values());
+  static final Iterator<TaskState> TASK_STATES = Iterators.cycle(TaskState
+      .values());
+  static final Iterator<TaskAttemptState> TASK_ATTEMPT_STATES = Iterators
+      .cycle(TaskAttemptState.values());
+  static final Iterator<TaskType> TASK_TYPES = Iterators.cycle(TaskType
+      .values());
+  static final Iterator<JobCounter> JOB_COUNTERS = Iterators.cycle(JobCounter
+      .values());
+  static final Iterator<FileSystemCounter> FS_COUNTERS = Iterators
+      .cycle(FileSystemCounter.values());
+  static final Iterator<TaskCounter> TASK_COUNTERS = Iterators
+      .cycle(TaskCounter.values());
   static final Iterator<String> FS_SCHEMES = Iterators.cycle("FILE", "HDFS",
       "LAFS", "CEPH");
-  static final Iterator<String> USER_COUNTER_GROUPS = Iterators.cycle(
-      "com.company.project.subproject.component.subcomponent.UserDefinedSpecificSpecialTask$Counters",
-      "PigCounters");
-  static final Iterator<String> USER_COUNTERS = Iterators.cycle(
-      "counter1", "counter2", "counter3");
+  static final Iterator<String> USER_COUNTER_GROUPS = Iterators
+      .cycle(
+          "com.company.project.subproject.component.subcomponent.UserDefinedSpecificSpecialTask$Counters",
+          "PigCounters");
+  static final Iterator<String> USER_COUNTERS = Iterators.cycle("counter1",
+      "counter2", "counter3");
   static final Iterator<Phase> PHASES = Iterators.cycle(Phase.values());
   static final Iterator<String> DIAGS = Iterators.cycle(
       "Error: java.lang.OutOfMemoryError: Java heap space",
       "Lost task tracker: tasktracker.domain/127.0.0.1:40879");
-  
+
   public static final String NM_HOST = "localhost";
   public static final int NM_PORT = 1234;
   public static final int NM_HTTP_PORT = 9999;
@@ -101,8 +107,7 @@ public class MockJobs extends MockApps {
   }
 
   public static Map<JobId, Job> newJobs(ApplicationId appID, int numJobsPerApp,
-                                        int numTasksPerJob,
-                                        int numAttemptsPerTask) {
+      int numTasksPerJob, int numAttemptsPerTask) {
     Map<JobId, Job> map = Maps.newHashMap();
     for (int j = 0; j < numJobsPerApp; ++j) {
       Job job = newJob(appID, j, numTasksPerJob, numAttemptsPerTask);
@@ -121,10 +126,12 @@ public class MockJobs extends MockApps {
   public static JobReport newJobReport(JobId id) {
     JobReport report = Records.newRecord(JobReport.class);
     report.setJobId(id);
-    report.setStartTime(System.currentTimeMillis() - (int)(Math.random() * DT));
-    report.setFinishTime(System.currentTimeMillis() + (int)(Math.random() * DT) + 1);
-    report.setMapProgress((float)Math.random());
-    report.setReduceProgress((float)Math.random());
+    report
+        .setStartTime(System.currentTimeMillis() - (int) (Math.random() * DT));
+    report.setFinishTime(System.currentTimeMillis()
+        + (int) (Math.random() * DT) + 1);
+    report.setMapProgress((float) Math.random());
+    report.setReduceProgress((float) Math.random());
     report.setJobState(JOB_STATES.next());
     return report;
   }
@@ -132,10 +139,12 @@ public class MockJobs extends MockApps {
   public static TaskReport newTaskReport(TaskId id) {
     TaskReport report = Records.newRecord(TaskReport.class);
     report.setTaskId(id);
-    report.setStartTime(System.currentTimeMillis() - (int)(Math.random() * DT));
-    report.setFinishTime(System.currentTimeMillis() + (int)(Math.random() * DT) + 1);
-    report.setProgress((float)Math.random());
-    report.setCounters(newCounters());
+    report
+        .setStartTime(System.currentTimeMillis() - (int) (Math.random() * DT));
+    report.setFinishTime(System.currentTimeMillis()
+        + (int) (Math.random() * DT) + 1);
+    report.setProgress((float) Math.random());
+    report.setCounters(TypeConverter.toYarn(newCounters()));
     report.setTaskState(TASK_STATES.next());
     return report;
   }
@@ -143,41 +152,41 @@ public class MockJobs extends MockApps {
   public static TaskAttemptReport newTaskAttemptReport(TaskAttemptId id) {
     TaskAttemptReport report = Records.newRecord(TaskAttemptReport.class);
     report.setTaskAttemptId(id);
-    report.setStartTime(System.currentTimeMillis() - (int)(Math.random() * DT));
-    report.setFinishTime(System.currentTimeMillis() + (int)(Math.random() * DT) + 1);
+    report
+        .setStartTime(System.currentTimeMillis() - (int) (Math.random() * DT));
+    report.setFinishTime(System.currentTimeMillis()
+        + (int) (Math.random() * DT) + 1);
     report.setPhase(PHASES.next());
     report.setTaskAttemptState(TASK_ATTEMPT_STATES.next());
-    report.setProgress((float)Math.random());
-    report.setCounters(newCounters());
+    report.setProgress((float) Math.random());
+    report.setCounters(TypeConverter.toYarn(newCounters()));
     return report;
   }
 
-  @SuppressWarnings("deprecation")
   public static Counters newCounters() {
-    org.apache.hadoop.mapred.Counters hc =
-        new org.apache.hadoop.mapred.Counters();
+    Counters hc = new Counters();
     for (JobCounter c : JobCounter.values()) {
-      hc.findCounter(c).setValue((long)(Math.random() * 1000));
+      hc.findCounter(c).setValue((long) (Math.random() * 1000));
     }
     for (TaskCounter c : TaskCounter.values()) {
-      hc.findCounter(c).setValue((long)(Math.random() * 1000));
+      hc.findCounter(c).setValue((long) (Math.random() * 1000));
     }
     int nc = FileSystemCounter.values().length * 4;
     for (int i = 0; i < nc; ++i) {
       for (FileSystemCounter c : FileSystemCounter.values()) {
-        hc.findCounter(FS_SCHEMES.next(), c).
-            setValue((long)(Math.random() * DT));
+        hc.findCounter(FS_SCHEMES.next(), c).setValue(
+            (long) (Math.random() * DT));
       }
     }
     for (int i = 0; i < 2 * 3; ++i) {
-      hc.findCounter(USER_COUNTER_GROUPS.next(), USER_COUNTERS.next()).
-          setValue((long)(Math.random() * 100000));
+      hc.findCounter(USER_COUNTER_GROUPS.next(), USER_COUNTERS.next())
+          .setValue((long) (Math.random() * 100000));
     }
-    return TypeConverter.toYarn(hc);
+    return hc;
   }
 
   public static Map<TaskAttemptId, TaskAttempt> newTaskAttempts(TaskId tid,
-                                                                int m) {
+      int m) {
     Map<TaskAttemptId, TaskAttempt> map = Maps.newHashMap();
     for (int i = 0; i < m; ++i) {
       TaskAttempt ta = newTaskAttempt(tid, i);
@@ -221,7 +230,10 @@ public class MockJobs extends MockApps {
 
       @Override
       public Counters getCounters() {
-        return report.getCounters();
+        if (report != null && report.getCounters() != null) {
+          return new Counters(TypeConverter.fromYarn(report.getCounters()));
+        }
+        return null;
       }
 
       @Override
@@ -237,9 +249,10 @@ public class MockJobs extends MockApps {
       @Override
       public boolean isFinished() {
         switch (report.getTaskAttemptState()) {
-          case SUCCEEDED:
-          case FAILED:
-          case KILLED: return true;
+        case SUCCEEDED:
+        case FAILED:
+        case KILLED:
+          return true;
         }
         return false;
       }
@@ -247,8 +260,8 @@ public class MockJobs extends MockApps {
       @Override
       public ContainerId getAssignedContainerID() {
         ContainerId id = Records.newRecord(ContainerId.class);
-        ApplicationAttemptId appAttemptId = 
-            Records.newRecord(ApplicationAttemptId.class);
+        ApplicationAttemptId appAttemptId = Records
+            .newRecord(ApplicationAttemptId.class);
         appAttemptId.setApplicationId(taid.getTaskId().getJobId().getAppId());
         appAttemptId.setAttemptId(0);
         id.setApplicationAttemptId(appAttemptId);
@@ -280,10 +293,10 @@ public class MockJobs extends MockApps {
         return 0;
       }
 
-	@Override
-	public String getNodeRackName() {
-		return "/default-rack";
-	}
+      @Override
+      public String getNodeRackName() {
+        return "/default-rack";
+      }
     };
   }
 
@@ -316,7 +329,8 @@ public class MockJobs extends MockApps {
 
       @Override
       public Counters getCounters() {
-        return report.getCounters();
+        return new Counters(
+          TypeConverter.fromYarn(report.getCounters()));
       }
 
       @Override
@@ -342,9 +356,10 @@ public class MockJobs extends MockApps {
       @Override
       public boolean isFinished() {
         switch (report.getTaskState()) {
-          case SUCCEEDED:
-          case KILLED:
-          case FAILED: return true;
+        case SUCCEEDED:
+        case KILLED:
+        case FAILED:
+          return true;
         }
         return false;
       }
@@ -361,8 +376,9 @@ public class MockJobs extends MockApps {
     };
   }
 
-  public static Counters getCounters(Collection<Task> tasks) {
-    Counters counters = JobImpl.newCounters();
+  public static Counters getCounters(
+      Collection<Task> tasks) {
+    Counters counters = new Counters();
     return JobImpl.incrTaskCounters(counters, tasks);
   }
 
@@ -398,12 +414,27 @@ public class MockJobs extends MockApps {
   }
 
   public static Job newJob(ApplicationId appID, int i, int n, int m) {
+    return newJob(appID, i, n, m, null);
+  }
+
+  public static Job newJob(ApplicationId appID, int i, int n, int m, Path confFile) {
     final JobId id = newJobID(appID, i);
     final String name = newJobName();
     final JobReport report = newJobReport(id);
     final Map<TaskId, Task> tasks = newTasks(id, n, m);
     final TaskCount taskCount = getTaskCount(tasks.values());
-    final Counters counters = getCounters(tasks.values());
+    final Counters counters = getCounters(tasks
+      .values());
+    final Path configFile = confFile;
+
+    Map<JobACL, AccessControlList> tmpJobACLs = new HashMap<JobACL, AccessControlList>();
+    Configuration conf = new Configuration();
+    conf.set(JobACL.VIEW_JOB.getAclName(), "testuser");
+    conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
+
+    JobACLsManager aclsManager = new JobACLsManager(conf);
+    tmpJobACLs = aclsManager.constructJobACLs(conf);
+    final Map<JobACL, AccessControlList> jobACLs = tmpJobACLs;
     return new Job() {
       @Override
       public JobId getID() {
@@ -431,7 +462,7 @@ public class MockJobs extends MockApps {
       }
 
       @Override
-      public Counters getCounters() {
+      public Counters getAllCounters() {
         return counters;
       }
 
@@ -483,7 +514,7 @@ public class MockJobs extends MockApps {
 
       @Override
       public List<String> getDiagnostics() {
-        return Collections.<String>emptyList();
+        return Collections.<String> emptyList();
       }
 
       @Override
@@ -504,12 +535,12 @@ public class MockJobs extends MockApps {
 
       @Override
       public Path getConfFile() {
-        throw new UnsupportedOperationException("Not supported yet.");
+        return configFile;
       }
 
       @Override
       public Map<JobACL, AccessControlList> getJobACLs() {
-        return Collections.<JobACL, AccessControlList>emptyMap();
+        return jobACLs;
       }
 
       @Override
@@ -521,11 +552,10 @@ public class MockJobs extends MockApps {
       }
     };
   }
-  
+
   private static AMInfo createAMInfo(int attempt) {
-    ApplicationAttemptId appAttemptId =
-        BuilderUtils.newApplicationAttemptId(
-            BuilderUtils.newApplicationId(100, 1), attempt);
+    ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(
+        BuilderUtils.newApplicationId(100, 1), attempt);
     ContainerId containerId = BuilderUtils.newContainerId(appAttemptId, 1);
     return MRBuilderUtils.newAMInfo(appAttemptId, System.currentTimeMillis(),
         containerId, NM_HOST, NM_PORT, NM_HTTP_PORT);

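The MockJobs changes keep the mocked reports in YARN record form while the Task and TaskAttempt accessors now return org.apache.hadoop.mapreduce.Counters, bridging the two representations with TypeConverter. A small sketch of that round trip, mirroring newTaskReport() and the getCounters() overrides above:

import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;

class CounterConversionSketch {
  // Sketch: store counters on the report as YARN records...
  static void storeCounters(TaskReport report, Counters counters) {
    report.setCounters(TypeConverter.toYarn(counters));
  }

  // ...and read them back as mapreduce Counters, returning null when absent.
  static Counters readCounters(TaskReport report) {
    if (report == null || report.getCounters() == null) {
      return null;
    }
    return new Counters(TypeConverter.fromYarn(report.getCounters()));
  }
}
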
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java Mon Jan 16 04:24:24 2012
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.mapreduce.v2.app;
 
+import java.net.Proxy;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
@@ -71,6 +73,34 @@ public class TestJobEndNotifier extends 
       waitInterval == 5);
   }
 
+  private void testProxyConfiguration(Configuration conf) {
+    conf.set(MRJobConfig.MR_JOB_END_NOTIFICATION_PROXY, "somehost");
+    setConf(conf);
+    Assert.assertTrue("Proxy shouldn't be set because port wasn't specified",
+      proxyToUse.type() == Proxy.Type.DIRECT);
+    conf.set(MRJobConfig.MR_JOB_END_NOTIFICATION_PROXY, "somehost:someport");
+    setConf(conf);
+    Assert.assertTrue("Proxy shouldn't be set because port wasn't numeric",
+      proxyToUse.type() == Proxy.Type.DIRECT);
+    conf.set(MRJobConfig.MR_JOB_END_NOTIFICATION_PROXY, "somehost:1000");
+    setConf(conf);
+    Assert.assertTrue("Proxy should have been set but wasn't ",
+      proxyToUse.toString().equals("HTTP @ somehost:1000"));
+    conf.set(MRJobConfig.MR_JOB_END_NOTIFICATION_PROXY, "socks@somehost:1000");
+    setConf(conf);
+    Assert.assertTrue("Proxy should have been socks but wasn't ",
+      proxyToUse.toString().equals("SOCKS @ somehost:1000"));
+    conf.set(MRJobConfig.MR_JOB_END_NOTIFICATION_PROXY, "SOCKS@somehost:1000");
+    setConf(conf);
+    Assert.assertTrue("Proxy should have been socks but wasn't ",
+      proxyToUse.toString().equals("SOCKS @ somehost:1000"));
+    conf.set(MRJobConfig.MR_JOB_END_NOTIFICATION_PROXY, "sfafn@somehost:1000");
+    setConf(conf);
+    Assert.assertTrue("Proxy should have been http but wasn't ",
+      proxyToUse.toString().equals("HTTP @ somehost:1000"));
+    
+  }
+
   /**
    * Test that setting parameters has the desired effect
    */
@@ -79,6 +109,7 @@ public class TestJobEndNotifier extends 
     Configuration conf = new Configuration();
     testNumRetries(conf);
     testWaitInterval(conf);
+    testProxyConfiguration(conf);
   }
 
   protected int notificationCount = 0;

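Judging only from the assertions in the new testProxyConfiguration(), MRJobConfig.MR_JOB_END_NOTIFICATION_PROXY appears to accept host:port (treated as an HTTP proxy) or an explicit socks@host:port prefix, and falls back to a direct connection when the port is missing or non-numeric. A hedged configuration sketch based on those test values:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRJobConfig;

class JobEndProxyConfigSketch {
  // Sketch inferred from the test assertions above, not from the notifier itself.
  static Configuration httpProxy() {
    Configuration conf = new Configuration();
    conf.set(MRJobConfig.MR_JOB_END_NOTIFICATION_PROXY, "somehost:1000");   // expected: HTTP @ somehost:1000
    return conf;
  }

  static Configuration socksProxy() {
    Configuration conf = new Configuration();
    conf.set(MRJobConfig.MR_JOB_END_NOTIFICATION_PROXY, "socks@somehost:1000"); // expected: SOCKS @ somehost:1000
    return conf;
  }
}
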
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java Mon Jan 16 04:24:24 2012
@@ -18,9 +18,6 @@
 
 package org.apache.hadoop.mapreduce.v2.app;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
@@ -30,11 +27,14 @@ import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
-import org.apache.hadoop.mapreduce.v2.api.records.Counters;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
@@ -46,13 +46,12 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
-import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType;
-import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus;
 import org.apache.hadoop.mapreduce.v2.app.speculate.DefaultSpeculator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.ExponentiallySmoothedTaskRuntimeEstimator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.LegacyTaskRuntimeEstimator;
@@ -74,7 +73,7 @@ import org.apache.hadoop.yarn.service.Co
 import org.junit.Assert;
 import org.junit.Test;
 
-
+@SuppressWarnings({"unchecked", "rawtypes"})
 public class TestRuntimeEstimators {
 
   private static int INITIAL_NUMBER_FREE_SLOTS = 600;
@@ -399,7 +398,7 @@ public class TestRuntimeEstimators {
     }
 
     @Override
-    public Counters getCounters() {
+    public Counters getAllCounters() {
       throw new UnsupportedOperationException("Not supported yet.");
     }
 

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java Mon Jan 16 04:24:24 2012
@@ -22,7 +22,6 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.JobPriority;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
@@ -45,14 +44,15 @@ import org.apache.hadoop.mapreduce.v2.ut
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.NodeReport;
 import org.apache.hadoop.yarn.api.records.QueueACL;
 import org.apache.hadoop.yarn.api.records.QueueState;
 import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 
+@SuppressWarnings("deprecation")
 public class TypeConverter {
 
   private static RecordFactory recordFactory;

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/MRClientProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/MRClientProtocol.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/MRClientProtocol.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/MRClientProtocol.java Mon Jan 16 04:24:24 2012
@@ -22,6 +22,8 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
@@ -54,4 +56,5 @@ public interface MRClientProtocol {
   public KillTaskResponse killTask(KillTaskRequest request) throws YarnRemoteException;
   public KillTaskAttemptResponse killTaskAttempt(KillTaskAttemptRequest request) throws YarnRemoteException;
   public FailTaskAttemptResponse failTaskAttempt(FailTaskAttemptRequest request) throws YarnRemoteException;
+  public GetDelegationTokenResponse getDelegationToken(GetDelegationTokenRequest request) throws YarnRemoteException;
 }

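getDelegationToken is the one new method on MRClientProtocol in this patch; the rest of the interface is unchanged. A minimal sketch of how a caller might drive it, assuming the usual RecordFactoryProvider wiring and a setRenewer(String) setter on the request record (the setter mirrors the "renewer" field added to the proto later in this commit, but it is an assumption here, as is the 0.23 package of YarnRemoteException):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse;
import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

public class DelegationTokenFetchSketch {

  // "client" is an already constructed MRClientProtocol proxy, for example the
  // PB client implementation modified below; "renewer" is the principal that
  // will be allowed to renew the token.
  static GetDelegationTokenResponse fetch(MRClientProtocol client, String renewer,
      Configuration conf) throws YarnRemoteException {
    GetDelegationTokenRequest request = RecordFactoryProvider.getRecordFactory(conf)
        .newRecordInstance(GetDelegationTokenRequest.class);
    request.setRenewer(renewer);               // assumed setter, mirroring the proto field
    return client.getDelegationToken(request); // the token rides in the response record
  }
}
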
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/MRClientProtocolPBClientImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/MRClientProtocolPBClientImpl.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/MRClientProtocolPBClientImpl.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/MRClientProtocolPBClientImpl.java Mon Jan 16 04:24:24 2012
@@ -29,6 +29,8 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
@@ -51,6 +53,8 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.FailTaskAttemptResponsePBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetCountersRequestPBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetCountersResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDelegationTokenRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDelegationTokenResponsePBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDiagnosticsRequestPBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDiagnosticsResponsePBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetJobReportRequestPBImpl;
@@ -71,6 +75,7 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillTaskResponsePBImpl;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDelegationTokenRequestProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto;
@@ -214,7 +219,26 @@ public class MRClientProtocolPBClientImp
       }
     }
   }
-
+  
+  @Override
+  public GetDelegationTokenResponse getDelegationToken(
+      GetDelegationTokenRequest request) throws YarnRemoteException {
+    GetDelegationTokenRequestProto requestProto = ((GetDelegationTokenRequestPBImpl)
+        request).getProto();
+    try {
+      return new GetDelegationTokenResponsePBImpl(proxy.getDelegationToken(
+          null, requestProto));
+    } catch (ServiceException e) {
+      if (e.getCause() instanceof YarnRemoteException) {
+        throw (YarnRemoteException)e.getCause();
+      } else if (e.getCause() instanceof UndeclaredThrowableException) {
+        throw (UndeclaredThrowableException)e.getCause();
+      } else {
+        throw new UndeclaredThrowableException(e);
+      }
+    }
+  }
+  
   @Override
   public KillJobResponse killJob(KillJobRequest request)
       throws YarnRemoteException {

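The client stub added above has the same shape as the existing ones: convert the request record to its proto, call the generated blocking stub, wrap the returned proto in a *PBImpl record, and unwrap ServiceException, because a protobuf blocking stub can only throw ServiceException and the real server-side failure travels as its cause. The helper below restates just that unwrapping step as a standalone sketch; the class name is illustrative, while the exception handling mirrors the catch block in the patch.

import java.lang.reflect.UndeclaredThrowableException;

import org.apache.hadoop.yarn.exceptions.YarnRemoteException;

import com.google.protobuf.ServiceException;

public final class ServiceExceptionUnwrapSketch {

  private ServiceExceptionUnwrapSketch() {}

  /** Rethrow the cause of a ServiceException the way the PB client stubs do. */
  static RuntimeException unwrap(ServiceException e) throws YarnRemoteException {
    if (e.getCause() instanceof YarnRemoteException) {
      throw (YarnRemoteException) e.getCause();          // remote failure, rethrown as-is
    } else if (e.getCause() instanceof UndeclaredThrowableException) {
      throw (UndeclaredThrowableException) e.getCause(); // already wrapped once
    }
    throw new UndeclaredThrowableException(e);           // anything else gets wrapped here
  }
}

A caller can then write "catch (ServiceException e) { throw unwrap(e); }", which is the structure repeated in each stub of MRClientProtocolPBClientImpl.
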
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/service/MRClientProtocolPBServiceImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/service/MRClientProtocolPBServiceImpl.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/service/MRClientProtocolPBServiceImpl.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/service/MRClientProtocolPBServiceImpl.java Mon Jan 16 04:24:24 2012
@@ -23,6 +23,8 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse;
@@ -44,6 +46,8 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.FailTaskAttemptResponsePBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetCountersRequestPBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetCountersResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDelegationTokenRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDelegationTokenResponsePBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDiagnosticsRequestPBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDiagnosticsResponsePBImpl;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetJobReportRequestPBImpl;
@@ -66,6 +70,8 @@ import org.apache.hadoop.mapreduce.v2.pr
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDelegationTokenRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDelegationTokenResponseProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto;
@@ -184,7 +190,20 @@ public class MRClientProtocolPBServiceIm
       throw new ServiceException(e);
     }
   }
-
+  
+  @Override
+  public GetDelegationTokenResponseProto getDelegationToken(
+      RpcController controller, GetDelegationTokenRequestProto proto)
+      throws ServiceException {
+    GetDelegationTokenRequest request = new GetDelegationTokenRequestPBImpl(proto);
+    try {
+      GetDelegationTokenResponse response = real.getDelegationToken(request);
+      return ((GetDelegationTokenResponsePBImpl)response).getProto();
+    } catch (YarnRemoteException e) {
+      throw new ServiceException(e);
+    }
+  }
+  
   @Override
   public KillJobResponseProto killJob(RpcController controller,
       KillJobRequestProto proto) throws ServiceException {

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/security/client/ClientHSSecurityInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/security/client/ClientHSSecurityInfo.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/security/client/ClientHSSecurityInfo.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/security/client/ClientHSSecurityInfo.java Mon Jan 16 04:24:24 2012
@@ -24,7 +24,9 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SecurityInfo;
+import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.TokenInfo;
+import org.apache.hadoop.security.token.TokenSelector;
 import org.apache.hadoop.yarn.proto.MRClientProtocol;
 
 public class ClientHSSecurityInfo extends SecurityInfo {
@@ -56,7 +58,22 @@ public class ClientHSSecurityInfo extend
 
   @Override
   public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
-    return null;
-  }
+    if (!protocol
+        .equals(MRClientProtocol.MRClientProtocolService.BlockingInterface.class)) {
+      return null;
+    }
+    return new TokenInfo() {
+
+      @Override
+      public Class<? extends Annotation> annotationType() {
+        return null;
+      }
+
+      @Override
+      public Class<? extends TokenSelector<? extends TokenIdentifier>>
+          value() {
+        return ClientHSTokenSelector.class;
+      }
+    };  }
 
 }

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java Mon Jan 16 04:24:24 2012
@@ -27,6 +27,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.net.URI;
+import java.net.URL;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -181,23 +182,31 @@ public class MRApps extends Apps {
       String mrAppGeneratedClasspathFile = "mrapp-generated-classpath";
       classpathFileStream =
           thisClassLoader.getResourceAsStream(mrAppGeneratedClasspathFile);
+
       // Put the file itself on classpath for tasks.
-      String classpathElement = thisClassLoader.getResource(mrAppGeneratedClasspathFile).getFile();
-      if (classpathElement.contains("!")) {
-        classpathElement = classpathElement.substring(0, classpathElement.indexOf("!"));
+      URL classpathResource = thisClassLoader
+        .getResource(mrAppGeneratedClasspathFile);
+      if (classpathResource != null) {
+        String classpathElement = classpathResource.getFile();
+        if (classpathElement.contains("!")) {
+          classpathElement = classpathElement.substring(0,
+            classpathElement.indexOf("!"));
+        } else {
+          classpathElement = new File(classpathElement).getParent();
+        }
+        Apps.addToEnvironment(environment, Environment.CLASSPATH.name(),
+          classpathElement);
       }
-      else {
-        classpathElement = new File(classpathElement).getParent();
+
+      if (classpathFileStream != null) {
+        reader = new BufferedReader(new InputStreamReader(classpathFileStream));
+        String cp = reader.readLine();
+        if (cp != null) {
+          Apps.addToEnvironment(environment, Environment.CLASSPATH.name(),
+            cp.trim());
+        }
       }
-      Apps.addToEnvironment(
-          environment,
-          Environment.CLASSPATH.name(), classpathElement);
-
-      reader = new BufferedReader(new InputStreamReader(classpathFileStream));
-      String cp = reader.readLine();
-      if (cp != null) {
-        Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), cp.trim());
-      }      
+
       // Add standard Hadoop classes
       for (String c : ApplicationConstants.APPLICATION_CLASSPATH) {
         Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), c);

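The MRApps change above is a null-safety fix: getResource() and getResourceAsStream() can both return null when mrapp-generated-classpath is not on the classpath, and the old code dereferenced both unconditionally. The patched code keeps the existing rule for turning the resource URL into a classpath element: cut at "!" when the resource lives inside a jar, otherwise take the parent directory. Below is a standalone restatement of that rule, with illustrative class and method names.

import java.io.File;
import java.net.URL;

public class ClasspathElementSketch {

  /**
   * Derive the classpath entry that contains a resource, the way the patched
   * MRApps does: a jar-style path such as "/x/y.jar!/name" maps to the jar
   * itself, a plain file path maps to its parent directory, and a missing
   * resource maps to null instead of a NullPointerException.
   */
  static String classpathElementFor(ClassLoader loader, String resourceName) {
    URL resource = loader.getResource(resourceName);
    if (resource == null) {
      return null;                                  // resource not on the classpath
    }
    String element = resource.getFile();            // e.g. "/x/y.jar!/name" or "/x/name"
    if (element.contains("!")) {
      return element.substring(0, element.indexOf("!"));
    }
    return new File(element).getParent();
  }

  public static void main(String[] args) {
    // "mrapp-generated-classpath" is the resource name MRApps looks for; any
    // name works for the sketch, and null is printed when the resource is absent.
    System.out.println(classpathElementFor(
        ClasspathElementSketch.class.getClassLoader(), "mrapp-generated-classpath"));
  }
}
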
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/MRClientProtocol.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/MRClientProtocol.proto?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/MRClientProtocol.proto (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/MRClientProtocol.proto Mon Jan 16 04:24:24 2012
@@ -30,7 +30,7 @@ service MRClientProtocolService {
   rpc getTaskAttemptCompletionEvents (GetTaskAttemptCompletionEventsRequestProto) returns (GetTaskAttemptCompletionEventsResponseProto);
   rpc getTaskReports (GetTaskReportsRequestProto) returns (GetTaskReportsResponseProto);
   rpc getDiagnostics (GetDiagnosticsRequestProto) returns (GetDiagnosticsResponseProto);
-
+  rpc getDelegationToken (GetDelegationTokenRequestProto) returns (GetDelegationTokenResponseProto);
   rpc killJob (KillJobRequestProto) returns (KillJobResponseProto);
   rpc killTask (KillTaskRequestProto) returns (KillTaskResponseProto);
   rpc killTaskAttempt (KillTaskAttemptRequestProto) returns (KillTaskAttemptResponseProto);

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/mr_service_protos.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/mr_service_protos.proto?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/mr_service_protos.proto (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/mr_service_protos.proto Mon Jan 16 04:24:24 2012
@@ -22,6 +22,7 @@ option java_generic_services = true;
 option java_generate_equals_and_hash = true;
 
 import "mr_protos.proto";
+import "yarn_protos.proto";
 
 message GetJobReportRequestProto {
   optional JobIdProto job_id = 1;
@@ -75,6 +76,13 @@ message GetDiagnosticsResponseProto {
   repeated string diagnostics = 1;
 }
 
+message GetDelegationTokenRequestProto {
+  optional string renewer = 1;
+}
+
+message GetDelegationTokenResponseProto {
+  optional DelegationTokenProto m_r_delegation_token = 1;
+}
 
 message KillJobRequestProto {
   optional JobIdProto job_id = 1;

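The two new messages carry the renewer on the request and the token (a DelegationTokenProto, hence the new yarn_protos.proto import) on the response. Their generated Java classes follow the standard protobuf builder pattern, which is what GetDelegationTokenRequestPBImpl and GetDelegationTokenResponsePBImpl wrap. A minimal sketch of building a request proto directly; the renewer value is only an example:

import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDelegationTokenRequestProto;

public class DelegationTokenProtoSketch {
  public static void main(String[] args) {
    // optional string renewer = 1;
    GetDelegationTokenRequestProto request = GetDelegationTokenRequestProto.newBuilder()
        .setRenewer("historyserver/host@EXAMPLE.COM")   // illustrative renewer principal
        .build();
    System.out.println(request.hasRenewer());           // true
    System.out.println(request.getRenewer());           // historyserver/host@EXAMPLE.COM
  }
}
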
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java Mon Jan 16 04:24:24 2012
@@ -30,6 +30,8 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
@@ -123,28 +125,24 @@ public class TestRPCFactories {
     @Override
     public GetJobReportResponse getJobReport(GetJobReportRequest request)
         throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public GetTaskReportResponse getTaskReport(GetTaskReportRequest request)
         throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public GetTaskAttemptReportResponse getTaskAttemptReport(
         GetTaskAttemptReportRequest request) throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public GetCountersResponse getCounters(GetCountersRequest request)
         throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
@@ -152,51 +150,49 @@ public class TestRPCFactories {
     public GetTaskAttemptCompletionEventsResponse getTaskAttemptCompletionEvents(
         GetTaskAttemptCompletionEventsRequest request)
         throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public GetTaskReportsResponse getTaskReports(GetTaskReportsRequest request)
         throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public GetDiagnosticsResponse getDiagnostics(GetDiagnosticsRequest request)
         throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public KillJobResponse killJob(KillJobRequest request)
         throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public KillTaskResponse killTask(KillTaskRequest request)
         throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public KillTaskAttemptResponse killTaskAttempt(
         KillTaskAttemptRequest request) throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
 
     @Override
     public FailTaskAttemptResponse failTaskAttempt(
         FailTaskAttemptRequest request) throws YarnRemoteException {
-      // TODO Auto-generated method stub
       return null;
     }
-    
+
+    @Override
+    public GetDelegationTokenResponse getDelegationToken(
+        GetDelegationTokenRequest request) throws YarnRemoteException {
+      return null;
+    }   
   }
 }

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml Mon Jan 16 04:24:24 2012
@@ -54,7 +54,6 @@
       <plugin>
         <groupId>org.apache.avro</groupId>
         <artifactId>avro-maven-plugin</artifactId>
-        <version>1.5.3</version>
         <executions>
           <execution>
             <phase>generate-sources</phase>

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java Mon Jan 16 04:24:24 2012
@@ -140,7 +140,20 @@ import org.apache.hadoop.util.ToolRunner
 public class JobClient extends CLI {
   public static enum TaskStatusFilter { NONE, KILLED, FAILED, SUCCEEDED, ALL }
   private TaskStatusFilter taskOutputFilter = TaskStatusFilter.FAILED; 
-
+  /* Notes that getDelegationToken was called. This is a hack for Oozie:
+   * it ensures that history server delegation tokens are added to the
+   * credentials for the job. Since the API only allows one delegation token
+   * to be returned, we have to resort to this flag.
+   */
+  private boolean getDelegationTokenCalled = false;
+  /* notes the renewer that will renew the delegation token */
+  private Text dtRenewer = null;
+  /* do we need a HS delegation token for this client */
+  static final String HS_DELEGATION_TOKEN_REQUIRED 
+      = "mapreduce.history.server.delegationtoken.required";
+  static final String HS_DELEGATION_TOKEN_RENEWER 
+      = "mapreduce.history.server.delegationtoken.renewer";
+  
   static{
     ConfigUtil.loadResources();
   }
@@ -584,6 +597,12 @@ public class JobClient extends CLI {
     try {
       conf.setBooleanIfUnset("mapred.mapper.new-api", false);
       conf.setBooleanIfUnset("mapred.reducer.new-api", false);
+      if (getDelegationTokenCalled) {
+        conf.setBoolean(HS_DELEGATION_TOKEN_REQUIRED, getDelegationTokenCalled);
+        getDelegationTokenCalled = false;
+        conf.set(HS_DELEGATION_TOKEN_RENEWER, dtRenewer.toString());
+        dtRenewer = null;
+      }
       Job job = clientUgi.doAs(new PrivilegedExceptionAction<Job> () {
         @Override
         public Job run() throws IOException, ClassNotFoundException, 
@@ -1012,11 +1031,25 @@ public class JobClient extends CLI {
     }
   }
 
-  private JobQueueInfo[] getJobQueueInfoArray(QueueInfo[] queues) 
-  throws IOException {
+  private JobQueueInfo getJobQueueInfo(QueueInfo queue) {
+    JobQueueInfo ret = new JobQueueInfo(queue);
+    // make sure to convert any children
+    if (queue.getQueueChildren().size() > 0) {
+      List<JobQueueInfo> childQueues = new ArrayList<JobQueueInfo>(queue
+          .getQueueChildren().size());
+      for (QueueInfo child : queue.getQueueChildren()) {
+        childQueues.add(getJobQueueInfo(child));
+      }
+      ret.setChildren(childQueues);
+    }
+    return ret;
+  }
+
+  private JobQueueInfo[] getJobQueueInfoArray(QueueInfo[] queues)
+      throws IOException {
     JobQueueInfo[] ret = new JobQueueInfo[queues.length];
     for (int i = 0; i < queues.length; i++) {
-      ret[i] = new JobQueueInfo(queues[i]);
+      ret[i] = getJobQueueInfo(queues[i]);
     }
     return ret;
   }
@@ -1168,6 +1201,8 @@ public class JobClient extends CLI {
    */
   public Token<DelegationTokenIdentifier> 
     getDelegationToken(final Text renewer) throws IOException, InterruptedException {
+    getDelegationTokenCalled = true;
+    dtRenewer = renewer;
     return clientUgi.doAs(new 
         PrivilegedExceptionAction<Token<DelegationTokenIdentifier>>() {
       public Token<DelegationTokenIdentifier> run() throws IOException, 

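Taken together, the JobClient changes do two things: getJobQueueInfo now converts child queues recursively so hierarchical queue information is not lost, and getDelegationToken records that it was called so that the next job submission also sets mapreduce.history.server.delegationtoken.required and mapreduce.history.server.delegationtoken.renewer on the job configuration (the Oozie workaround described in the comment). A sketch of that Oozie-style call sequence, assuming a working cluster configuration; the renewer name is chosen only for illustration and no input or output paths are set, so this is not a complete job:

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;

public class OozieStyleSubmitSketch {
  public static void main(String[] args) throws Exception {
    JobConf conf = new JobConf();
    JobClient client = new JobClient(conf);

    // Fetching a token first flips the internal flag added by this patch ...
    client.getDelegationToken(new Text("oozie-renewer"));

    // ... so the next submission also stamps the two history-server
    // delegation-token keys onto the job configuration before submitting.
    RunningJob job = client.submitJob(conf);
    System.out.println("submitted " + job.getID());
  }
}
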

