hadoop-mapreduce-commits mailing list archives

From: szets...@apache.org
Subject: svn commit: r1231834 [4/7] - in /hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project: ./ bin/ conf/ hadoop-mapreduce-client/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/ja...
Date: Mon, 16 Jan 2012 04:24:34 GMT
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueInfo.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueInfo.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueInfo.java Mon Jan 16 04:24:24 2012
@@ -105,7 +105,7 @@ public class JobQueueInfo extends QueueI
   public List<JobQueueInfo> getChildren() {
     List<JobQueueInfo> list = new ArrayList<JobQueueInfo>();
     for (QueueInfo q : super.getQueueChildren()) {
-      list.add(new JobQueueInfo(q));
+      list.add((JobQueueInfo)q);
     }
     return list;
   }

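The getChildren() change above replaces a per-child copy construction with a downcast, which is only safe if the parent class now stores JobQueueInfo instances in its children list. A minimal sketch of that invariant, using simplified stand-ins for QueueInfo and JobQueueInfo rather than the real classes:

    import java.util.ArrayList;
    import java.util.List;

    class QueueInfoSketch {
      private List<QueueInfoSketch> children = new ArrayList<QueueInfoSketch>();
      public List<QueueInfoSketch> getQueueChildren() { return children; }
      public void addChild(QueueInfoSketch child) { children.add(child); }
    }

    class JobQueueInfoSketch extends QueueInfoSketch {
      // The patched getChildren() assumes every element in the parent's list
      // is already of the subclass type, so the cast avoids allocating a
      // wrapper per child; it throws ClassCastException if that invariant is
      // ever violated.
      public List<JobQueueInfoSketch> getChildren() {
        List<JobQueueInfoSketch> list = new ArrayList<JobQueueInfoSketch>();
        for (QueueInfoSketch q : getQueueChildren()) {
          list.add((JobQueueInfoSketch) q);
        }
        return list;
      }
    }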
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java Mon Jan 16 04:24:24 2012
@@ -1030,7 +1030,7 @@ public class Job extends JobContextImpl 
   public void addFileToClassPath(Path file)
     throws IOException {
     ensureState(JobState.DEFINE);
-    DistributedCache.addFileToClassPath(file, conf);
+    DistributedCache.addFileToClassPath(file, conf, file.getFileSystem(conf));
   }
 
   /**
@@ -1045,7 +1045,7 @@ public class Job extends JobContextImpl 
   public void addArchiveToClassPath(Path archive)
     throws IOException {
     ensureState(JobState.DEFINE);
-    DistributedCache.addArchiveToClassPath(archive, conf);
+    DistributedCache.addArchiveToClassPath(archive, conf, archive.getFileSystem(conf));
   }
 
   /**

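With this change the classpath entry is qualified against the filesystem the Path itself names, not the default filesystem from the Configuration. A hedged usage sketch (the hdfs:// authority and paths are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.Job;

    public class ClasspathExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        // This jar lives on an explicit, non-default hdfs:// authority.
        // Before the patch it was qualified against FileSystem.get(conf),
        // the default filesystem, which produced the wrong URI for paths
        // like this one.
        job.addFileToClassPath(new Path("hdfs://nn2:8020/libs/aux.jar"));
        job.addArchiveToClassPath(new Path("hdfs://nn2:8020/libs/aux.tgz"));
      }
    }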
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java Mon Jan 16 04:24:24 2012
@@ -156,6 +156,8 @@ public interface MRJobConfig {
   
   public static final String TASK_TIMEOUT = "mapreduce.task.timeout";
 
+  public static final String TASK_TIMEOUT_CHECK_INTERVAL_MS = "mapreduce.task.timeout.check-interval-ms";
+  
   public static final String TASK_ID = "mapreduce.task.id";
 
   public static final String TASK_OUTPUT_DIR = "mapreduce.task.output.dir";
@@ -277,6 +279,12 @@ public interface MRJobConfig {
   public static final String JOB_ACL_MODIFY_JOB = "mapreduce.job.acl-modify-job";
 
   public static final String DEFAULT_JOB_ACL_MODIFY_JOB = " ";
+  
+  /* Config naming the local file that holds all of the job's
+   * credentials.
+   */
+  public static final String MAPREDUCE_JOB_CREDENTIALS_BINARY = 
+      "mapreduce.job.credentials.binary";
 
   public static final String JOB_SUBMITHOST =
     "mapreduce.job.submithostname";
@@ -367,6 +375,11 @@ public interface MRJobConfig {
   public static final String MR_AM_JOB_REDUCE_PREEMPTION_LIMIT = 
     MR_AM_PREFIX  + "job.reduce.preemption.limit";
   public static final float DEFAULT_MR_AM_JOB_REDUCE_PREEMPTION_LIMIT = 0.5f;
+  
+  /** AM ACL disabled. **/
+  public static final String JOB_AM_ACCESS_DISABLED = 
+    "mapreduce.job.am-access-disabled";
+  public static final boolean DEFAULT_JOB_AM_ACCESS_DISABLED = false;
 
   /**
    * Limit reduces starting until a certain percentage of maps have finished.
@@ -423,6 +436,26 @@ public interface MRJobConfig {
   public static final String MR_AM_CREATE_JH_INTERMEDIATE_BASE_DIR = 
     MR_AM_PREFIX + "create-intermediate-jh-base-dir";
   
+  public static final String MR_AM_HISTORY_MAX_UNFLUSHED_COMPLETE_EVENTS =
+      MR_AM_PREFIX + "history.max-unflushed-events";
+  public static final int DEFAULT_MR_AM_HISTORY_MAX_UNFLUSHED_COMPLETE_EVENTS =
+      200;
+
+  public static final String MR_AM_HISTORY_JOB_COMPLETE_UNFLUSHED_MULTIPLIER =
+      MR_AM_PREFIX + "history.job-complete-unflushed-multiplier";
+  public static final int DEFAULT_MR_AM_HISTORY_JOB_COMPLETE_UNFLUSHED_MULTIPLIER =
+      30;
+
+  public static final String MR_AM_HISTORY_COMPLETE_EVENT_FLUSH_TIMEOUT_MS =
+      MR_AM_PREFIX + "history.complete-event-flush-timeout";
+  public static final long DEFAULT_MR_AM_HISTORY_COMPLETE_EVENT_FLUSH_TIMEOUT_MS =
+      30 * 1000l;
+
+  public static final String MR_AM_HISTORY_USE_BATCHED_FLUSH_QUEUE_SIZE_THRESHOLD =
+      MR_AM_PREFIX + "history.use-batched-flush.queue-size.threshold";
+  public static final int DEFAULT_MR_AM_HISTORY_USE_BATCHED_FLUSH_QUEUE_SIZE_THRESHOLD =
+      50;
+  
   public static final String MAPRED_MAP_ADMIN_JAVA_OPTS =
       "mapreduce.admin.map.child.java.opts";
 
@@ -499,6 +532,9 @@ public interface MRJobConfig {
   public static final String MR_JOB_END_NOTIFICATION_URL =
     "mapreduce.job.end-notification.url";
 
+  public static final String MR_JOB_END_NOTIFICATION_PROXY =
+    "mapreduce.job.end-notification.proxy";
+
   public static final String MR_JOB_END_RETRY_ATTEMPTS =
     "mapreduce.job.end-notification.retry.attempts";
 

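The new keys are set like any other job configuration. A short sketch of plausible settings; every value below is illustrative, not a shipped default:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.MRJobConfig;

    public class NewKeysExample {
      public static Configuration configure() {
        Configuration conf = new Configuration();
        // Interval at which task timeouts are checked, in milliseconds.
        conf.setLong(MRJobConfig.TASK_TIMEOUT_CHECK_INTERVAL_MS, 10 * 1000L);
        // Local file that holds all of the job's credentials.
        conf.set(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY,
            "/tmp/job.credentials");
        // Proxy through which the job-end notification callback is sent.
        conf.set(MRJobConfig.MR_JOB_END_NOTIFICATION_PROXY,
            "http://proxy.example.com:8080");
        return conf;
      }
    }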
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java Mon Jan 16 04:24:24 2012
@@ -269,7 +269,7 @@ public class DistributedCache {
   /**
   * Add a file path to the current set of classpath entries. It adds the file
    * to cache as well.  Intended to be used by user code.
-   * 
+   *
    * @param file Path of the file to be added
    * @param conf Configuration that contains the classpath setting
    * @deprecated Use {@link Job#addFileToClassPath(Path)} instead
@@ -277,12 +277,25 @@ public class DistributedCache {
   @Deprecated
   public static void addFileToClassPath(Path file, Configuration conf)
     throws IOException {
+    addFileToClassPath(file, conf, file.getFileSystem(conf));
+  }
+
+  /**
+   * Add a file path to the current set of classpath entries. It adds the file
+   * to cache as well.  Intended to be used by user code.
+   *
+   * @param file Path of the file to be added
+   * @param conf Configuration that contains the classpath setting
+   * @param fs FileSystem with respect to which {@code file} should
+   *              be interpreted.
+   */
+  public static void addFileToClassPath
+           (Path file, Configuration conf, FileSystem fs)
+        throws IOException {
     String classpath = conf.get(MRJobConfig.CLASSPATH_FILES);
     conf.set(MRJobConfig.CLASSPATH_FILES, classpath == null ? file.toString()
              : classpath + "," + file.toString());
-    FileSystem fs = FileSystem.get(conf);
     URI uri = fs.makeQualified(file).toUri();
-
     addCacheFile(uri, conf);
   }
 
@@ -318,10 +331,23 @@ public class DistributedCache {
   @Deprecated
   public static void addArchiveToClassPath(Path archive, Configuration conf)
     throws IOException {
+    addArchiveToClassPath(archive, conf, archive.getFileSystem(conf));
+  }
+
+  /**
+   * Add an archive path to the current set of classpath entries. It adds the
+   * archive to cache as well.  Intended to be used by user code.
+   *
+   * @param archive Path of the archive to be added
+   * @param conf Configuration that contains the classpath setting
+   * @param fs FileSystem with respect to which {@code archive} should be interpreted.
+   */
+  public static void addArchiveToClassPath
+         (Path archive, Configuration conf, FileSystem fs)
+      throws IOException {
     String classpath = conf.get(MRJobConfig.CLASSPATH_ARCHIVES);
     conf.set(MRJobConfig.CLASSPATH_ARCHIVES, classpath == null ? archive
              .toString() : classpath + "," + archive.toString());
-    FileSystem fs = FileSystem.get(conf);
     URI uri = fs.makeQualified(archive).toUri();
 
     addCacheArchive(uri, conf);

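The new three-argument overloads let a caller pass the FileSystem a path should be resolved against, instead of the previous hard-coded FileSystem.get(conf). A minimal sketch (the path is illustrative):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.filecache.DistributedCache;

    public class CacheClasspathExample {
      public static void add(Configuration conf) throws IOException {
        Path jar = new Path("hdfs://nn2:8020/libs/aux.jar");
        // Qualify the path against its own filesystem so that entries on a
        // non-default filesystem resolve to the correct URI.
        FileSystem fs = jar.getFileSystem(conf);
        DistributedCache.addFileToClassPath(jar, conf, fs);
      }
    }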
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobFinishedEvent.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobFinishedEvent.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobFinishedEvent.java Mon Jan 16 04:24:24 2012
@@ -18,15 +18,12 @@
 
 package org.apache.hadoop.mapreduce.jobhistory;
 
-import java.io.IOException;
-
+import org.apache.avro.util.Utf8;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.JobID;
 
-import org.apache.avro.util.Utf8;
-
 /**
  * Event to record successful completion of job
  *
@@ -34,7 +31,18 @@ import org.apache.avro.util.Utf8;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class JobFinishedEvent  implements HistoryEvent {
-  private JobFinished datum = new JobFinished();
+
+  private JobFinished datum = null;
+
+  private JobID jobId;
+  private long finishTime;
+  private int finishedMaps;
+  private int finishedReduces;
+  private int failedMaps;
+  private int failedReduces;
+  private Counters mapCounters;
+  private Counters reduceCounters;
+  private Counters totalCounters;
 
   /** 
    * Create an event to record successful job completion
@@ -53,50 +61,75 @@ public class JobFinishedEvent  implement
       int failedMaps, int failedReduces,
       Counters mapCounters, Counters reduceCounters,
       Counters totalCounters) {
-    datum.jobid = new Utf8(id.toString());
-    datum.finishTime = finishTime;
-    datum.finishedMaps = finishedMaps;
-    datum.finishedReduces = finishedReduces;
-    datum.failedMaps = failedMaps;
-    datum.failedReduces = failedReduces;
-    datum.mapCounters =
-      EventWriter.toAvro(mapCounters, "MAP_COUNTERS");
-    datum.reduceCounters =
-      EventWriter.toAvro(reduceCounters, "REDUCE_COUNTERS");
-    datum.totalCounters =
-      EventWriter.toAvro(totalCounters, "TOTAL_COUNTERS");
+    this.jobId = id;
+    this.finishTime = finishTime;
+    this.finishedMaps = finishedMaps;
+    this.finishedReduces = finishedReduces;
+    this.failedMaps = failedMaps;
+    this.failedReduces = failedReduces;
+    this.mapCounters = mapCounters;
+    this.reduceCounters = reduceCounters;
+    this.totalCounters = totalCounters;
   }
 
   JobFinishedEvent() {}
 
-  public Object getDatum() { return datum; }
-  public void setDatum(Object datum) { this.datum = (JobFinished)datum; }
+  public Object getDatum() {
+    if (datum == null) {
+      datum = new JobFinished();
+      datum.jobid = new Utf8(jobId.toString());
+      datum.finishTime = finishTime;
+      datum.finishedMaps = finishedMaps;
+      datum.finishedReduces = finishedReduces;
+      datum.failedMaps = failedMaps;
+      datum.failedReduces = failedReduces;
+      datum.mapCounters = EventWriter.toAvro(mapCounters, "MAP_COUNTERS");
+      datum.reduceCounters = EventWriter.toAvro(reduceCounters,
+        "REDUCE_COUNTERS");
+      datum.totalCounters = EventWriter.toAvro(totalCounters, "TOTAL_COUNTERS");
+    }
+    return datum;
+  }
+
+  public void setDatum(Object oDatum) {
+    this.datum = (JobFinished) oDatum;
+    this.jobId = JobID.forName(datum.jobid.toString());
+    this.finishTime = datum.finishTime;
+    this.finishedMaps = datum.finishedMaps;
+    this.finishedReduces = datum.finishedReduces;
+    this.failedMaps = datum.failedMaps;
+    this.failedReduces = datum.failedReduces;
+    this.mapCounters = EventReader.fromAvro(datum.mapCounters);
+    this.reduceCounters = EventReader.fromAvro(datum.reduceCounters);
+    this.totalCounters = EventReader.fromAvro(datum.totalCounters);
+  }
+
   public EventType getEventType() {
     return EventType.JOB_FINISHED;
   }
 
   /** Get the Job ID */
-  public JobID getJobid() { return JobID.forName(datum.jobid.toString()); }
+  public JobID getJobid() { return jobId; }
   /** Get the job finish time */
-  public long getFinishTime() { return datum.finishTime; }
+  public long getFinishTime() { return finishTime; }
   /** Get the number of finished maps for the job */
-  public int getFinishedMaps() { return datum.finishedMaps; }
+  public int getFinishedMaps() { return finishedMaps; }
   /** Get the number of finished reducers for the job */
-  public int getFinishedReduces() { return datum.finishedReduces; }
+  public int getFinishedReduces() { return finishedReduces; }
   /** Get the number of failed maps for the job */
-  public int getFailedMaps() { return datum.failedMaps; }
+  public int getFailedMaps() { return failedMaps; }
   /** Get the number of failed reducers for the job */
-  public int getFailedReduces() { return datum.failedReduces; }
+  public int getFailedReduces() { return failedReduces; }
   /** Get the counters for the job */
   public Counters getTotalCounters() {
-    return EventReader.fromAvro(datum.totalCounters);
+    return totalCounters;
   }
   /** Get the Map counters for the job */
   public Counters getMapCounters() {
-    return EventReader.fromAvro(datum.mapCounters);
+    return mapCounters;
   }
   /** Get the reduce counters for the job */
   public Counters getReduceCounters() {
-    return EventReader.fromAvro(datum.reduceCounters);
+    return reduceCounters;
   }
 }

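The rewrite of JobFinishedEvent follows the pattern applied to every history event in this commit: keep plain Java fields, build the Avro record lazily in getDatum(), and decode it back into fields in setDatum(), so the getters no longer pay an Avro conversion per call. A stripped-down sketch of the pattern with a stand-in record type, not the real generated JobFinished class:

    import org.apache.avro.util.Utf8;

    // Stand-in for a generated Avro record.
    class SketchRecord {
      Utf8 id;
      long finishTime;
    }

    class LazyEventSketch {
      private SketchRecord datum = null; // built only when serialization asks
      private String id;                 // plain fields back the getters
      private long finishTime;

      LazyEventSketch(String id, long finishTime) {
        this.id = id;                    // no Avro allocation at construction
        this.finishTime = finishTime;
      }

      public Object getDatum() {
        if (datum == null) {             // materialize the record on demand
          datum = new SketchRecord();
          datum.id = new Utf8(id);
          datum.finishTime = finishTime;
        }
        return datum;
      }

      public void setDatum(Object oDatum) { // decode once when reading history
        this.datum = (SketchRecord) oDatum;
        this.id = datum.id.toString();
        this.finishTime = datum.finishTime;
      }

      public long getFinishTime() { return finishTime; } // no Avro round-trip
    }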
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/MapAttemptFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/MapAttemptFinishedEvent.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/MapAttemptFinishedEvent.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/MapAttemptFinishedEvent.java Mon Jan 16 04:24:24 2012
@@ -34,8 +34,25 @@ import org.apache.hadoop.mapreduce.TaskT
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class MapAttemptFinishedEvent  implements HistoryEvent {
-  private MapAttemptFinished datum = new MapAttemptFinished();
-  
+
+  private MapAttemptFinished datum = null;
+
+  private TaskAttemptID attemptId;
+  private TaskType taskType;
+  private String taskStatus;
+  private long finishTime;
+  private String hostname;
+  private String rackName;
+  private int port;
+  private long mapFinishTime;
+  private String state;
+  private Counters counters;
+  int[][] allSplits;
+  int[] clockSplits;
+  int[] cpuUsages;
+  int[] vMemKbytes;
+  int[] physMemKbytes;
+
   /** 
    * Create an event for successful completion of map attempts
    * @param id Task Attempt ID
@@ -60,33 +77,21 @@ public class MapAttemptFinishedEvent  im
       (TaskAttemptID id, TaskType taskType, String taskStatus, 
        long mapFinishTime, long finishTime, String hostname, int port, 
        String rackName, String state, Counters counters, int[][] allSplits) {
-    datum.taskid = new Utf8(id.getTaskID().toString());
-    datum.attemptId = new Utf8(id.toString());
-    datum.taskType = new Utf8(taskType.name());
-    datum.taskStatus = new Utf8(taskStatus);
-    datum.mapFinishTime = mapFinishTime;
-    datum.finishTime = finishTime;
-    datum.hostname = new Utf8(hostname);
-    datum.port = port;
-    // This is needed for reading old jh files
-    if (rackName != null) {
-      datum.rackname = new Utf8(rackName);
-    }
-    datum.state = new Utf8(state);
-    datum.counters = EventWriter.toAvro(counters);
-
-    datum.clockSplits
-      = AvroArrayUtils.toAvro
-           (ProgressSplitsBlock.arrayGetWallclockTime(allSplits));
-    datum.cpuUsages 
-      = AvroArrayUtils.toAvro
-           (ProgressSplitsBlock.arrayGetCPUTime(allSplits));
-    datum.vMemKbytes 
-      = AvroArrayUtils.toAvro
-           (ProgressSplitsBlock.arrayGetVMemKbytes(allSplits));
-    datum.physMemKbytes 
-      = AvroArrayUtils.toAvro
-           (ProgressSplitsBlock.arrayGetPhysMemKbytes(allSplits));
+    this.attemptId = id;
+    this.taskType = taskType;
+    this.taskStatus = taskStatus;
+    this.mapFinishTime = mapFinishTime;
+    this.finishTime = finishTime;
+    this.hostname = hostname;
+    this.rackName = rackName;
+    this.port = port;
+    this.state = state;
+    this.counters = counters;
+    this.allSplits = allSplits;
+    this.clockSplits = ProgressSplitsBlock.arrayGetWallclockTime(allSplits);
+    this.cpuUsages = ProgressSplitsBlock.arrayGetCPUTime(allSplits);
+    this.vMemKbytes = ProgressSplitsBlock.arrayGetVMemKbytes(allSplits);
+    this.physMemKbytes = ProgressSplitsBlock.arrayGetPhysMemKbytes(allSplits);
   }
 
   /** 
@@ -117,57 +122,100 @@ public class MapAttemptFinishedEvent  im
   
   MapAttemptFinishedEvent() {}
 
-  public Object getDatum() { return datum; }
-  public void setDatum(Object datum) {
-    this.datum = (MapAttemptFinished)datum;
+  public Object getDatum() {
+    if (datum == null) {
+      datum = new MapAttemptFinished();
+      datum.taskid = new Utf8(attemptId.getTaskID().toString());
+      datum.attemptId = new Utf8(attemptId.toString());
+      datum.taskType = new Utf8(taskType.name());
+      datum.taskStatus = new Utf8(taskStatus);
+      datum.mapFinishTime = mapFinishTime;
+      datum.finishTime = finishTime;
+      datum.hostname = new Utf8(hostname);
+      datum.port = port;
+      if (rackName != null) {
+        datum.rackname = new Utf8(rackName);
+      }
+      datum.state = new Utf8(state);
+      datum.counters = EventWriter.toAvro(counters);
+
+      datum.clockSplits = AvroArrayUtils.toAvro(ProgressSplitsBlock
+        .arrayGetWallclockTime(allSplits));
+      datum.cpuUsages = AvroArrayUtils.toAvro(ProgressSplitsBlock
+        .arrayGetCPUTime(allSplits));
+      datum.vMemKbytes = AvroArrayUtils.toAvro(ProgressSplitsBlock
+        .arrayGetVMemKbytes(allSplits));
+      datum.physMemKbytes = AvroArrayUtils.toAvro(ProgressSplitsBlock
+        .arrayGetPhysMemKbytes(allSplits));
+    }
+    return datum;
+  }
+
+  public void setDatum(Object oDatum) {
+    this.datum = (MapAttemptFinished)oDatum;
+    this.attemptId = TaskAttemptID.forName(datum.attemptId.toString());
+    this.taskType = TaskType.valueOf(datum.taskType.toString());
+    this.taskStatus = datum.taskStatus.toString();
+    this.mapFinishTime = datum.mapFinishTime;
+    this.finishTime = datum.finishTime;
+    this.hostname = datum.hostname.toString();
+    this.rackName = datum.rackname.toString();
+    this.port = datum.port;
+    this.state = datum.state.toString();
+    this.counters = EventReader.fromAvro(datum.counters);
+    this.clockSplits = AvroArrayUtils.fromAvro(datum.clockSplits);
+    this.cpuUsages = AvroArrayUtils.fromAvro(datum.cpuUsages);
+    this.vMemKbytes = AvroArrayUtils.fromAvro(datum.vMemKbytes);
+    this.physMemKbytes = AvroArrayUtils.fromAvro(datum.physMemKbytes);
   }
 
   /** Get the task ID */
-  public TaskID getTaskId() { return TaskID.forName(datum.taskid.toString()); }
+  public TaskID getTaskId() { return attemptId.getTaskID(); }
   /** Get the attempt id */
   public TaskAttemptID getAttemptId() {
-    return TaskAttemptID.forName(datum.attemptId.toString());
+    return attemptId;
   }
+
   /** Get the task type */
   public TaskType getTaskType() {
-    return TaskType.valueOf(datum.taskType.toString());
+    return TaskType.valueOf(taskType.toString());
   }
   /** Get the task status */
-  public String getTaskStatus() { return datum.taskStatus.toString(); }
+  public String getTaskStatus() { return taskStatus.toString(); }
   /** Get the map phase finish time */
-  public long getMapFinishTime() { return datum.mapFinishTime; }
+  public long getMapFinishTime() { return mapFinishTime; }
   /** Get the attempt finish time */
-  public long getFinishTime() { return datum.finishTime; }
+  public long getFinishTime() { return finishTime; }
   /** Get the host name */
-  public String getHostname() { return datum.hostname.toString(); }
+  public String getHostname() { return hostname.toString(); }
   /** Get the tracker rpc port */
-  public int getPort() { return datum.port; }
+  public int getPort() { return port; }
   
   /** Get the rack name */
   public String getRackName() {
-    return datum.rackname == null ? null : datum.rackname.toString();
+    return rackName == null ? null : rackName.toString();
   }
   
   /** Get the state string */
-  public String getState() { return datum.state.toString(); }
+  public String getState() { return state.toString(); }
   /** Get the counters */
-  Counters getCounters() { return EventReader.fromAvro(datum.counters); }
+  Counters getCounters() { return counters; }
   /** Get the event type */
    public EventType getEventType() {
     return EventType.MAP_ATTEMPT_FINISHED;
   }
 
   public int[] getClockSplits() {
-    return AvroArrayUtils.fromAvro(datum.clockSplits);
+    return clockSplits;
   }
   public int[] getCpuUsages() {
-    return AvroArrayUtils.fromAvro(datum.cpuUsages);
+    return cpuUsages;
   }
   public int[] getVMemKbytes() {
-    return AvroArrayUtils.fromAvro(datum.vMemKbytes);
+    return vMemKbytes;
   }
   public int[] getPhysMemKbytes() {
-    return AvroArrayUtils.fromAvro(datum.physMemKbytes);
+    return physMemKbytes;
   }
   
 }

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/ReduceAttemptFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/ReduceAttemptFinishedEvent.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/ReduceAttemptFinishedEvent.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/ReduceAttemptFinishedEvent.java Mon Jan 16 04:24:24 2012
@@ -34,8 +34,25 @@ import org.apache.hadoop.mapreduce.TaskT
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class ReduceAttemptFinishedEvent  implements HistoryEvent {
-  private ReduceAttemptFinished datum =
-    new ReduceAttemptFinished();
+
+  private ReduceAttemptFinished datum = null;
+
+  private TaskAttemptID attemptId;
+  private TaskType taskType;
+  private String taskStatus;
+  private long shuffleFinishTime;
+  private long sortFinishTime;
+  private long finishTime;
+  private String hostname;
+  private String rackName;
+  private int port;
+  private String state;
+  private Counters counters;
+  int[][] allSplits;
+  int[] clockSplits;
+  int[] cpuUsages;
+  int[] vMemKbytes;
+  int[] physMemKbytes;
 
   /**
    * Create an event to record completion of a reduce attempt
@@ -60,33 +77,22 @@ public class ReduceAttemptFinishedEvent 
      long shuffleFinishTime, long sortFinishTime, long finishTime,
      String hostname, int port,  String rackName, String state, 
      Counters counters, int[][] allSplits) {
-    datum.taskid = new Utf8(id.getTaskID().toString());
-    datum.attemptId = new Utf8(id.toString());
-    datum.taskType = new Utf8(taskType.name());
-    datum.taskStatus = new Utf8(taskStatus);
-    datum.shuffleFinishTime = shuffleFinishTime;
-    datum.sortFinishTime = sortFinishTime;
-    datum.finishTime = finishTime;
-    datum.hostname = new Utf8(hostname);
-    datum.port = port;
-    if (rackName != null) {
-      datum.rackname = new Utf8(rackName);
-    }
-    datum.state = new Utf8(state);
-    datum.counters = EventWriter.toAvro(counters);
-
-    datum.clockSplits 
-      = AvroArrayUtils.toAvro
-           (ProgressSplitsBlock.arrayGetWallclockTime(allSplits));
-    datum.cpuUsages 
-      = AvroArrayUtils.toAvro
-           (ProgressSplitsBlock.arrayGetCPUTime(allSplits));
-    datum.vMemKbytes 
-      = AvroArrayUtils.toAvro
-           (ProgressSplitsBlock.arrayGetVMemKbytes(allSplits));
-    datum.physMemKbytes 
-      = AvroArrayUtils.toAvro
-           (ProgressSplitsBlock.arrayGetPhysMemKbytes(allSplits));
+    this.attemptId = id;
+    this.taskType = taskType;
+    this.taskStatus = taskStatus;
+    this.shuffleFinishTime = shuffleFinishTime;
+    this.sortFinishTime = sortFinishTime;
+    this.finishTime = finishTime;
+    this.hostname = hostname;
+    this.rackName = rackName;
+    this.port = port;
+    this.state = state;
+    this.counters = counters;
+    this.allSplits = allSplits;
+    this.clockSplits = ProgressSplitsBlock.arrayGetWallclockTime(allSplits);
+    this.cpuUsages = ProgressSplitsBlock.arrayGetCPUTime(allSplits);
+    this.vMemKbytes = ProgressSplitsBlock.arrayGetVMemKbytes(allSplits);
+    this.physMemKbytes = ProgressSplitsBlock.arrayGetPhysMemKbytes(allSplits);
   }
 
   /**
@@ -117,43 +123,87 @@ public class ReduceAttemptFinishedEvent 
 
   ReduceAttemptFinishedEvent() {}
 
-  public Object getDatum() { return datum; }
-  public void setDatum(Object datum) {
-    this.datum = (ReduceAttemptFinished)datum;
+  public Object getDatum() {
+    if (datum == null) {
+      datum = new ReduceAttemptFinished();
+      datum.taskid = new Utf8(attemptId.getTaskID().toString());
+      datum.attemptId = new Utf8(attemptId.toString());
+      datum.taskType = new Utf8(taskType.name());
+      datum.taskStatus = new Utf8(taskStatus);
+      datum.shuffleFinishTime = shuffleFinishTime;
+      datum.sortFinishTime = sortFinishTime;
+      datum.finishTime = finishTime;
+      datum.hostname = new Utf8(hostname);
+      datum.port = port;
+      if (rackName != null) {
+        datum.rackname = new Utf8(rackName);
+      }
+      datum.state = new Utf8(state);
+      datum.counters = EventWriter.toAvro(counters);
+
+      datum.clockSplits = AvroArrayUtils.toAvro(ProgressSplitsBlock
+        .arrayGetWallclockTime(allSplits));
+      datum.cpuUsages = AvroArrayUtils.toAvro(ProgressSplitsBlock
+        .arrayGetCPUTime(allSplits));
+      datum.vMemKbytes = AvroArrayUtils.toAvro(ProgressSplitsBlock
+        .arrayGetVMemKbytes(allSplits));
+      datum.physMemKbytes = AvroArrayUtils.toAvro(ProgressSplitsBlock
+        .arrayGetPhysMemKbytes(allSplits));
+    }
+    return datum;
+  }
+
+  public void setDatum(Object oDatum) {
+    this.datum = (ReduceAttemptFinished)oDatum;
+    this.attemptId = TaskAttemptID.forName(datum.attemptId.toString());
+    this.taskType = TaskType.valueOf(datum.taskType.toString());
+    this.taskStatus = datum.taskStatus.toString();
+    this.shuffleFinishTime = datum.shuffleFinishTime;
+    this.sortFinishTime = datum.sortFinishTime;
+    this.finishTime = datum.finishTime;
+    this.hostname = datum.hostname.toString();
+    this.rackName = datum.rackname.toString();
+    this.port = datum.port;
+    this.state = datum.state.toString();
+    this.counters = EventReader.fromAvro(datum.counters);
+    this.clockSplits = AvroArrayUtils.fromAvro(datum.clockSplits);
+    this.cpuUsages = AvroArrayUtils.fromAvro(datum.cpuUsages);
+    this.vMemKbytes = AvroArrayUtils.fromAvro(datum.vMemKbytes);
+    this.physMemKbytes = AvroArrayUtils.fromAvro(datum.physMemKbytes);
   }
 
   /** Get the Task ID */
-  public TaskID getTaskId() { return TaskID.forName(datum.taskid.toString()); }
+  public TaskID getTaskId() { return attemptId.getTaskID(); }
   /** Get the attempt id */
   public TaskAttemptID getAttemptId() {
-    return TaskAttemptID.forName(datum.attemptId.toString());
+    return TaskAttemptID.forName(attemptId.toString());
   }
   /** Get the task type */
   public TaskType getTaskType() {
-    return TaskType.valueOf(datum.taskType.toString());
+    return TaskType.valueOf(taskType.toString());
   }
   /** Get the task status */
-  public String getTaskStatus() { return datum.taskStatus.toString(); }
+  public String getTaskStatus() { return taskStatus.toString(); }
   /** Get the finish time of the sort phase */
-  public long getSortFinishTime() { return datum.sortFinishTime; }
+  public long getSortFinishTime() { return sortFinishTime; }
   /** Get the finish time of the shuffle phase */
-  public long getShuffleFinishTime() { return datum.shuffleFinishTime; }
+  public long getShuffleFinishTime() { return shuffleFinishTime; }
   /** Get the finish time of the attempt */
-  public long getFinishTime() { return datum.finishTime; }
+  public long getFinishTime() { return finishTime; }
   /** Get the name of the host where the attempt ran */
-  public String getHostname() { return datum.hostname.toString(); }
+  public String getHostname() { return hostname.toString(); }
   /** Get the tracker rpc port */
-  public int getPort() { return datum.port; }
+  public int getPort() { return port; }
   
   /** Get the rack name of the node where the attempt ran */
   public String getRackName() {
-    return datum.rackname == null ? null : datum.rackname.toString();
+    return rackName == null ? null : rackName.toString();
   }
   
   /** Get the state string */
-  public String getState() { return datum.state.toString(); }
+  public String getState() { return state.toString(); }
   /** Get the counters for the attempt */
-  Counters getCounters() { return EventReader.fromAvro(datum.counters); }
+  Counters getCounters() { return counters; }
   /** Get the event type */
   public EventType getEventType() {
     return EventType.REDUCE_ATTEMPT_FINISHED;
@@ -161,16 +211,16 @@ public class ReduceAttemptFinishedEvent 
 
 
   public int[] getClockSplits() {
-    return AvroArrayUtils.fromAvro(datum.clockSplits);
+    return clockSplits;
   }
   public int[] getCpuUsages() {
-    return AvroArrayUtils.fromAvro(datum.cpuUsages);
+    return cpuUsages;
   }
   public int[] getVMemKbytes() {
-    return AvroArrayUtils.fromAvro(datum.vMemKbytes);
+    return vMemKbytes;
   }
   public int[] getPhysMemKbytes() {
-    return AvroArrayUtils.fromAvro(datum.physMemKbytes);
+    return physMemKbytes;
   }
 
 }

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java Mon Jan 16 04:24:24 2012
@@ -18,8 +18,7 @@
 
 package org.apache.hadoop.mapreduce.jobhistory;
 
-import java.io.IOException;
-
+import org.apache.avro.util.Utf8;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapreduce.Counters;
@@ -27,8 +26,6 @@ import org.apache.hadoop.mapreduce.TaskA
 import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.mapreduce.TaskType;
 
-import org.apache.avro.util.Utf8;
-
 /**
  * Event to record successful task completion
  *
@@ -36,7 +33,17 @@ import org.apache.avro.util.Utf8;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class TaskAttemptFinishedEvent  implements HistoryEvent {
-  private TaskAttemptFinished datum = new TaskAttemptFinished();
+
+  private TaskAttemptFinished datum = null;
+
+  private TaskAttemptID attemptId;
+  private TaskType taskType;
+  private String taskStatus;
+  private long finishTime;
+  private String rackName;
+  private String hostname;
+  private String state;
+  private Counters counters;
 
   /**
    * Create an event to record successful finishes for setup and cleanup 
@@ -53,52 +60,73 @@ public class TaskAttemptFinishedEvent  i
       TaskType taskType, String taskStatus, 
       long finishTime, String rackName,
       String hostname, String state, Counters counters) {
-    datum.taskid = new Utf8(id.getTaskID().toString());
-    datum.attemptId = new Utf8(id.toString());
-    datum.taskType = new Utf8(taskType.name());
-    datum.taskStatus = new Utf8(taskStatus);
-    datum.finishTime = finishTime;
-    if (rackName != null) {
-      datum.rackname = new Utf8(rackName);
-    }
-    datum.hostname = new Utf8(hostname);
-    datum.state = new Utf8(state);
-    datum.counters = EventWriter.toAvro(counters);
+    this.attemptId = id;
+    this.taskType = taskType;
+    this.taskStatus = taskStatus;
+    this.finishTime = finishTime;
+    this.rackName = rackName;
+    this.hostname = hostname;
+    this.state = state;
+    this.counters = counters;
   }
 
   TaskAttemptFinishedEvent() {}
 
-  public Object getDatum() { return datum; }
-  public void setDatum(Object datum) {
-    this.datum = (TaskAttemptFinished)datum;
+  public Object getDatum() {
+    if (datum == null) {
+      datum = new TaskAttemptFinished();
+      datum.taskid = new Utf8(attemptId.getTaskID().toString());
+      datum.attemptId = new Utf8(attemptId.toString());
+      datum.taskType = new Utf8(taskType.name());
+      datum.taskStatus = new Utf8(taskStatus);
+      datum.finishTime = finishTime;
+      if (rackName != null) {
+        datum.rackname = new Utf8(rackName);
+      }
+      datum.hostname = new Utf8(hostname);
+      datum.state = new Utf8(state);
+      datum.counters = EventWriter.toAvro(counters);
+    }
+    return datum;
+  }
+  public void setDatum(Object oDatum) {
+    this.datum = (TaskAttemptFinished)oDatum;
+    this.attemptId = TaskAttemptID.forName(datum.attemptId.toString());
+    this.taskType = TaskType.valueOf(datum.taskType.toString());
+    this.taskStatus = datum.taskStatus.toString();
+    this.finishTime = datum.finishTime;
+    this.rackName = datum.rackname.toString();
+    this.hostname = datum.hostname.toString();
+    this.state = datum.state.toString();
+    this.counters = EventReader.fromAvro(datum.counters);
   }
 
   /** Get the task ID */
-  public TaskID getTaskId() { return TaskID.forName(datum.taskid.toString()); }
+  public TaskID getTaskId() { return attemptId.getTaskID(); }
   /** Get the task attempt id */
   public TaskAttemptID getAttemptId() {
-    return TaskAttemptID.forName(datum.attemptId.toString());
+    return TaskAttemptID.forName(attemptId.toString());
   }
   /** Get the task type */
   public TaskType getTaskType() {
-    return TaskType.valueOf(datum.taskType.toString());
+    return TaskType.valueOf(taskType.toString());
   }
   /** Get the task status */
-  public String getTaskStatus() { return datum.taskStatus.toString(); }
+  public String getTaskStatus() { return taskStatus.toString(); }
   /** Get the attempt finish time */
-  public long getFinishTime() { return datum.finishTime; }
+  public long getFinishTime() { return finishTime; }
   /** Get the host where the attempt executed */
-  public String getHostname() { return datum.hostname.toString(); }
+  public String getHostname() { return hostname.toString(); }
   
   /** Get the rackname where the attempt executed */
   public String getRackName() {
-    return datum.rackname == null ? null : datum.rackname.toString();
+    return rackName == null ? null : rackName.toString();
   }
   
   /** Get the state string */
-  public String getState() { return datum.state.toString(); }
+  public String getState() { return state.toString(); }
   /** Get the counters for the attempt */
-  Counters getCounters() { return EventReader.fromAvro(datum.counters); }
+  Counters getCounters() { return counters; }
   /** Get the event type */
   public EventType getEventType() {
     // Note that the task type can be setup/map/reduce/cleanup but the 

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java Mon Jan 16 04:24:24 2012
@@ -18,16 +18,13 @@
 
 package org.apache.hadoop.mapreduce.jobhistory;
 
-import java.io.IOException;
-
+import org.apache.avro.util.Utf8;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.mapreduce.TaskType;
 
-import org.apache.avro.util.Utf8;
-
 /**
  * Event to record the successful completion of a task
  *
@@ -35,7 +32,14 @@ import org.apache.avro.util.Utf8;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class TaskFinishedEvent implements HistoryEvent {
-  private TaskFinished datum = new TaskFinished();
+
+  private TaskFinished datum = null;
+
+  private TaskID taskid;
+  private long finishTime;
+  private TaskType taskType;
+  private String status;
+  private Counters counters;
   
   /**
    * Create an event to record the successful completion of a task
@@ -48,32 +52,48 @@ public class TaskFinishedEvent implement
   public TaskFinishedEvent(TaskID id, long finishTime,
                            TaskType taskType,
                            String status, Counters counters) {
-    datum.taskid = new Utf8(id.toString());
-    datum.finishTime = finishTime;
-    datum.counters = EventWriter.toAvro(counters);
-    datum.taskType = new Utf8(taskType.name());
-    datum.status = new Utf8(status);
+    this.taskid = id;
+    this.finishTime = finishTime;
+    this.taskType = taskType;
+    this.status = status;
+    this.counters = counters;
   }
   
   TaskFinishedEvent() {}
 
-  public Object getDatum() { return datum; }
-  public void setDatum(Object datum) {
-    this.datum = (TaskFinished)datum;
+  public Object getDatum() {
+    if (datum == null) {
+      datum = new TaskFinished();
+      datum.taskid = new Utf8(taskid.toString());
+      datum.finishTime = finishTime;
+      datum.counters = EventWriter.toAvro(counters);
+      datum.taskType = new Utf8(taskType.name());
+      datum.status = new Utf8(status);
+    }
+    return datum;
+  }
+
+  public void setDatum(Object oDatum) {
+    this.datum = (TaskFinished)oDatum;
+    this.taskid = TaskID.forName(datum.taskid.toString());
+    this.finishTime = datum.finishTime;
+    this.taskType = TaskType.valueOf(datum.taskType.toString());
+    this.status = datum.status.toString();
+    this.counters = EventReader.fromAvro(datum.counters);
   }
 
   /** Get task id */
-  public TaskID getTaskId() { return TaskID.forName(datum.taskid.toString()); }
+  public TaskID getTaskId() { return TaskID.forName(taskid.toString()); }
   /** Get the task finish time */
-  public long getFinishTime() { return datum.finishTime; }
+  public long getFinishTime() { return finishTime; }
   /** Get task counters */
-  public Counters getCounters() { return EventReader.fromAvro(datum.counters); }
+  public Counters getCounters() { return counters; }
   /** Get task type */
   public TaskType getTaskType() {
-    return TaskType.valueOf(datum.taskType.toString());
+    return TaskType.valueOf(taskType.toString());
   }
   /** Get task status */
-  public String getTaskStatus() { return datum.status.toString(); }
+  public String getTaskStatus() { return status.toString(); }
   /** Get event type */
   public EventType getEventType() {
     return EventType.TASK_FINISHED;

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java Mon Jan 16 04:24:24 2012
@@ -32,6 +32,7 @@ import org.apache.hadoop.hdfs.security.t
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Master;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -101,7 +102,7 @@ public class TokenCache {
     String delegTokenRenewer = Master.getMasterPrincipal(conf);
     if (delegTokenRenewer == null || delegTokenRenewer.length() == 0) {
       throw new IOException(
-          "Can't get JobTracker Kerberos principal for use as renewer");
+          "Can't get Master Kerberos principal for use as renewer");
     }
     boolean readFile = true;
 
@@ -112,7 +113,7 @@ public class TokenCache {
       if (readFile) {
         readFile = false;
         String binaryTokenFilename =
-          conf.get("mapreduce.job.credentials.binary");
+          conf.get(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY);
         if (binaryTokenFilename != null) {
           Credentials binary;
           try {
@@ -172,10 +173,14 @@ public class TokenCache {
   @InterfaceAudience.Private
   public static Token<DelegationTokenIdentifier> getDelegationToken(
       Credentials credentials, String namenode) {
+    // No fs-specific tokens are issued by this fs. It may, however, issue
+    // tokens for other filesystems, keyed by that filesystem's name.
+    if (namenode == null)  
+      return null;
     return (Token<DelegationTokenIdentifier>) credentials.getToken(new Text(
         namenode));
   }
-  
+
   /**
    * load job token from a file
    * @param conf

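getDelegationToken() now tolerates a null service name, which a filesystem reports via getCanonicalServiceName() when it issues no tokens of its own, as the mocked filesystem in the TestTokenCache change below does. A hedged sketch of the caller-side effect:

    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
    import org.apache.hadoop.mapreduce.security.TokenCache;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.security.token.Token;

    public class TokenLookupSketch {
      static Token<DelegationTokenIdentifier> lookup(Credentials creds,
          FileSystem fs) {
        // getCanonicalServiceName() may return null for a token-less
        // filesystem; the patched getDelegationToken() then returns null
        // instead of building new Text(null) and failing.
        return TokenCache.getDelegationToken(creds,
            fs.getCanonicalServiceName());
      }
    }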
Propchange: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Jan 16 04:24:24 2012
@@ -1,3 +1,4 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:1164771,1167318,1167383,1170379,1170459,1171297,1172916,1173402,1176550,1177487,1177531,1177859,1177864,1189932,1189982,1195575,1196113,1196129,1204114,1204117,1204122,1204124,1204129,1204131,1204370,1204376,1204388,1205260,1206786,1206830,1207694,1208153,1208313,1212021,1212062,1212073,1212084,1213537,1213586,1213592-1213593,1213954,1214046,1214066,1220510,1221348,1226211,1227091,1227423
+/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:1227776-1231827
+/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:1164771,1167318,1167383,1170379,1170459,1171297,1172916,1173402,1176550,1177487,1177531,1177859,1177864,1182205,1189932,1189982,1195575,1196113,1196129,1204114,1204117,1204122,1204124,1204129,1204131,1204177,1204370,1204376,1204388,1205260,1205697,1206786,1206830,1207694,1208153,1208313,1212021,1212062,1212073,1212084,1213537,1213586,1213592-1213593,1213954,1214046,1214066,1220510,1221348,1225192,1225456,1225489,1225591,1226211,1226239,1227091,1227165,1227423,1229347,1230398,1231569,1231572,1231627,1231640
 /hadoop/core/branches/branch-0.19/mapred/src/java/mapred-default.xml:713112
 /hadoop/core/trunk/src/mapred/mapred-default.xml:776175-785643

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/security/TestTokenCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/security/TestTokenCache.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/security/TestTokenCache.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/security/TestTokenCache.java Mon Jan 16 04:24:24 2012
@@ -130,7 +130,7 @@ public class TestTokenCache {
   private FileSystem setupMultiFs(final FileSystem singleFs,
       final String renewer, final Credentials credentials) throws Exception {
     FileSystem mockFs = mock(FileSystem.class);
-    when(mockFs.getCanonicalServiceName()).thenReturn("multifs");
+    when(mockFs.getCanonicalServiceName()).thenReturn(null);
     when(mockFs.getUri()).thenReturn(new URI("multifs:///"));
 
     when(mockFs.getDelegationTokens(any(String.class))).thenThrow(

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java Mon Jan 16 04:24:24 2012
@@ -32,13 +32,13 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobACLsManager;
+import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
-import org.apache.hadoop.mapreduce.v2.api.records.Counters;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
@@ -89,7 +89,7 @@ public class CompletedJob implements org
     
     loadFullHistoryData(loadTasks, historyFile);
     user = userName;
-    counters = TypeConverter.toYarn(jobInfo.getTotalCounters());
+    counters = jobInfo.getTotalCounters();
     diagnostics.add(jobInfo.getErrorInfo());
     report =
         RecordFactoryProvider.getRecordFactory(null).newRecordInstance(
@@ -121,7 +121,7 @@ public class CompletedJob implements org
   }
 
   @Override
-  public Counters getCounters() {
+  public Counters getAllCounters() {
     return counters;
   }
 

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java Mon Jan 16 04:24:24 2012
@@ -24,10 +24,10 @@ import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
-import org.apache.hadoop.mapreduce.v2.api.records.Counters;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
@@ -60,7 +60,7 @@ public class CompletedTask implements Ta
     this.finishTime = taskInfo.getFinishTime();
     this.type = TypeConverter.toYarn(taskInfo.getTaskType());
     if (taskInfo.getCounters() != null)
-      this.counters = TypeConverter.toYarn(taskInfo.getCounters());
+      this.counters = taskInfo.getCounters();
     if (taskInfo.getTaskStatus() != null) {
       this.state = TaskState.valueOf(taskInfo.getTaskStatus());
     } else {
@@ -86,7 +86,7 @@ public class CompletedTask implements Ta
     report.setFinishTime(finishTime);
     report.setTaskState(state);
     report.setProgress(getProgress());
-    report.setCounters(getCounters());
+    report.setCounters(TypeConverter.toYarn(getCounters()));
     report.addAllRunningAttempts(new ArrayList<TaskAttemptId>(attempts.keySet()));
   }
 

Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java Mon Jan 16 04:24:24 2012
@@ -21,9 +21,9 @@ package org.apache.hadoop.mapreduce.v2.h
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
-import org.apache.hadoop.mapreduce.v2.api.records.Counters;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
@@ -46,8 +46,9 @@ public class CompletedTaskAttempt implem
   CompletedTaskAttempt(TaskId taskId, TaskAttemptInfo attemptInfo) {
     this.attemptInfo = attemptInfo;
     this.attemptId = TypeConverter.toYarn(attemptInfo.getAttemptId());
-    if (attemptInfo.getCounters() != null)
-      this.counters = TypeConverter.toYarn(attemptInfo.getCounters());
+    if (attemptInfo.getCounters() != null) {
+      this.counters = attemptInfo.getCounters();
+    }
     if (attemptInfo.getTaskStatus() != null) {
       this.state = TaskAttemptState.valueOf(attemptInfo.getTaskStatus());
     } else {
@@ -61,7 +62,6 @@ public class CompletedTaskAttempt implem
     }
     
     report = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptReport.class);
-    report.setCounters(counters);
     
     report.setTaskAttemptId(attemptId);
     report.setTaskAttemptState(state);
@@ -78,7 +78,7 @@ public class CompletedTaskAttempt implem
     }
 //    report.setPhase(attemptInfo.get); //TODO
     report.setStateString(attemptInfo.getState());
-    report.setCounters(getCounters());
+    report.setCounters(TypeConverter.toYarn(getCounters()));
     report.setContainerId(attemptInfo.getContainerId());
     if (attemptInfo.getHostname() == null) {
       report.setNodeManagerHost("UNKNOWN");

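CompletedTaskAttempt gets the same treatment and additionally drops the setCounters call from the constructor, since the report is now populated in one place. Note that getCounters() can still be null when the history file carried no counters for the attempt; a defensive caller might guard the conversion explicitly (an illustrative fragment -- whether toYarn itself tolerates null is not shown in this diff):

    Counters c = getCounters();
    report.setCounters(c == null ? null : TypeConverter.toYarn(c));
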
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java Mon Jan 16 04:24:24 2012
@@ -33,11 +33,15 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
+import org.apache.hadoop.mapreduce.v2.api.MRDelegationTokenIdentifier;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsResponse;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
@@ -67,13 +71,17 @@ import org.apache.hadoop.mapreduce.v2.hs
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.api.records.DelegationToken;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.RPCUtil;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
 import org.apache.hadoop.yarn.service.AbstractService;
+import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebApps;
 
@@ -91,11 +99,14 @@ public class HistoryClientService extend
   private WebApp webApp;
   private InetSocketAddress bindAddress;
   private HistoryContext history;
-
-  public HistoryClientService(HistoryContext history) {
+  private JHSDelegationTokenSecretManager jhsDTSecretManager;
+  
+  public HistoryClientService(HistoryContext history,
+      JHSDelegationTokenSecretManager jhsDTSecretManager) {
     super("HistoryClientService");
     this.history = history;
     this.protocolHandler = new MRClientProtocolHandler();
+    this.jhsDTSecretManager = jhsDTSecretManager;
   }
 
   public void start() {
@@ -109,14 +120,15 @@ public class HistoryClientService extend
       JHAdminConfig.DEFAULT_MR_HISTORY_ADDRESS);
     InetAddress hostNameResolved = null;
     try {
-      hostNameResolved = InetAddress.getLocalHost(); //address.getAddress().getLocalHost();
+      hostNameResolved = InetAddress.getLocalHost(); 
+      //address.getAddress().getLocalHost();
     } catch (UnknownHostException e) {
       throw new YarnException(e);
     }
 
     server =
         rpc.getServer(MRClientProtocol.class, protocolHandler, address,
-            conf, null,
+            conf, jhsDTSecretManager,
             conf.getInt(JHAdminConfig.MR_HISTORY_CLIENT_THREAD_COUNT,
                 JHAdminConfig.DEFAULT_MR_HISTORY_CLIENT_THREAD_COUNT));
 
@@ -190,7 +202,7 @@ public class HistoryClientService extend
       JobId jobId = request.getJobId();
       Job job = verifyAndGetJob(jobId);
       GetCountersResponse response = recordFactory.newRecordInstance(GetCountersResponse.class);
-      response.setCounters(job.getCounters());
+      response.setCounters(TypeConverter.toYarn(job.getAllCounters()));
       return response;
     }
 
@@ -277,6 +289,38 @@ public class HistoryClientService extend
       }
       return response;
     }
+    
+    @Override
+    public GetDelegationTokenResponse getDelegationToken(
+        GetDelegationTokenRequest request) throws YarnRemoteException {
+
+      try {
+      // Verify that the connection is kerberos authenticated
+      AuthenticationMethod authMethod = UserGroupInformation
+        .getRealAuthenticationMethod(UserGroupInformation.getCurrentUser());
+      if (UserGroupInformation.isSecurityEnabled()
+          && (authMethod != AuthenticationMethod.KERBEROS)) {
+       throw new IOException(
+          "Delegation Token can be issued only with kerberos authentication");
+      }
+
+      GetDelegationTokenResponse response = recordFactory.newRecordInstance(
+          GetDelegationTokenResponse.class);
+      MRDelegationTokenIdentifier tokenIdentifier =
+          new MRDelegationTokenIdentifier();
+      Token<MRDelegationTokenIdentifier> realJHSToken =
+          new Token<MRDelegationTokenIdentifier>(tokenIdentifier,
+              jhsDTSecretManager);
+      DelegationToken mrDToken = BuilderUtils.newDelegationToken(
+        realJHSToken.getIdentifier(), realJHSToken.getKind().toString(),
+        realJHSToken.getPassword(), bindAddress.getAddress().getHostAddress()
+            + ":" + bindAddress.getPort());
+      response.setDelegationToken(mrDToken);
+      return response;
+      } catch (IOException i) {
+        throw RPCUtil.getRemoteException(i);
+      }
+    }
 
     private void checkAccess(Job job, JobACL jobOperation)
         throws YarnRemoteException {

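The new getDelegationToken handler gates token issue on a real kerberos login, mints an MRDelegationTokenIdentifier through the JHSDelegationTokenSecretManager that now backs the RPC server, and stamps the token with the history server's host:port as its service address. The kerberos gate is worth calling out on its own; getRealAuthenticationMethod unwraps proxy users, so a proxied connection cannot mint tokens unless the underlying login was kerberos. The same check, isolated from the handler:

    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    AuthenticationMethod real =
        UserGroupInformation.getRealAuthenticationMethod(ugi);
    if (UserGroupInformation.isSecurityEnabled()
        && real != AuthenticationMethod.KERBEROS) {
      throw new IOException(
          "Delegation Token can be issued only with kerberos authentication");
    }
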
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java Mon Jan 16 04:24:24 2012
@@ -24,6 +24,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.util.StringUtils;
@@ -41,6 +42,7 @@ public class JobHistoryServer extends Co
   private HistoryContext historyContext;
   private HistoryClientService clientService;
   private JobHistory jobHistoryService;
+  private JHSDelegationTokenSecretManager jhsDTSecretManager;
 
   public JobHistoryServer() {
     super(JobHistoryServer.class.getName());
@@ -56,17 +58,52 @@ public class JobHistoryServer extends Co
     }
     jobHistoryService = new JobHistory();
     historyContext = (HistoryContext)jobHistoryService;
-    clientService = new HistoryClientService(historyContext);
+    this.jhsDTSecretManager = createJHSSecretManager(conf);
+    clientService = new HistoryClientService(historyContext, 
+        this.jhsDTSecretManager);
     addService(jobHistoryService);
     addService(clientService);
     super.init(config);
   }
 
+  protected JHSDelegationTokenSecretManager createJHSSecretManager(
+      Configuration conf) {
+    long secretKeyInterval = 
+        conf.getLong(MRConfig.DELEGATION_KEY_UPDATE_INTERVAL_KEY, 
+                     MRConfig.DELEGATION_KEY_UPDATE_INTERVAL_DEFAULT);
+      long tokenMaxLifetime =
+        conf.getLong(MRConfig.DELEGATION_TOKEN_MAX_LIFETIME_KEY,
+                     MRConfig.DELEGATION_TOKEN_MAX_LIFETIME_DEFAULT);
+      long tokenRenewInterval =
+        conf.getLong(MRConfig.DELEGATION_TOKEN_RENEW_INTERVAL_KEY, 
+                     MRConfig.DELEGATION_TOKEN_RENEW_INTERVAL_DEFAULT);
+      
+    return new JHSDelegationTokenSecretManager(secretKeyInterval, 
+        tokenMaxLifetime, tokenRenewInterval, 3600000);
+  }
+  
   protected void doSecureLogin(Configuration conf) throws IOException {
     SecurityUtil.login(conf, JHAdminConfig.MR_HISTORY_KEYTAB,
         JHAdminConfig.MR_HISTORY_PRINCIPAL);
   }
 
+  @Override
+  public void start() {
+    try {
+      jhsDTSecretManager.startThreads();
+    } catch(IOException io) {
+      LOG.error("Error while starting the Secret Manager threads", io);
+      throw new RuntimeException(io);
+    }
+    super.start();
+  }
+  
+  @Override
+  public void stop() {
+    jhsDTSecretManager.stopThreads();
+    super.stop();
+  }
+  
   public static void main(String[] args) {
     StringUtils.startupShutdownMessage(JobHistoryServer.class, args, LOG);
     try {

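JobHistoryServer wires the secret manager into the composite-service lifecycle: it is built from configuration in init(), its key-roll and expiry threads are started in start() and stopped in stop(), and the trailing 3600000 constructor argument is presumably the token-remover scan interval (one hour), matching the usual AbstractDelegationTokenSecretManager parameter order. Because createJHSSecretManager is protected, a test can substitute short-lived keys; a hypothetical subclass using the four-argument constructor shown above (assuming JHSDelegationTokenSecretManager lives in the same v2.hs package):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.v2.hs.JHSDelegationTokenSecretManager;
    import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;

    class ShortLivedTokenJHS extends JobHistoryServer {
      @Override
      protected JHSDelegationTokenSecretManager createJHSSecretManager(
          Configuration conf) {
        // 1s key roll, 10s max lifetime, 5s renew, 1s remover scan -- test values only
        return new JHSDelegationTokenSecretManager(1000L, 10000L, 5000L, 1000L);
      }
    }
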
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/PartialJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/PartialJob.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/PartialJob.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/PartialJob.java Mon Jan 16 04:24:24 2012
@@ -22,9 +22,9 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
-import org.apache.hadoop.mapreduce.v2.api.records.Counters;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
@@ -95,7 +95,7 @@ public class PartialJob implements org.a
   }
 
   @Override
-  public Counters getCounters() {
+  public Counters getAllCounters() {
     return null;
   }
 

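PartialJob is renamed along with the Job interface: getCounters() becomes getAllCounters(), and for a partial job (built from the history index without parsing the full file) it still returns null. A caller that may be handed a PartialJob therefore needs to tolerate the null before converting; an illustrative guard:

    Counters all = job.getAllCounters();
    if (all != null) {
      response.setCounters(TypeConverter.toYarn(all));
    }
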
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java Mon Jan 16 04:24:24 2012
@@ -31,14 +31,13 @@ import javax.ws.rs.core.UriInfo;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
-import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.webapp.AMWebServices;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobCounterInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptCounterInfo;
@@ -131,7 +130,7 @@ public class HsWebServices {
       try {
         sBegin = Long.parseLong(startedBegin);
       } catch (NumberFormatException e) {
-        throw new BadRequestException(e.getMessage());
+        throw new BadRequestException("Invalid number format: " + e.getMessage());
       }
       if (sBegin < 0) {
         throw new BadRequestException("startedTimeBegin must be greater than 0");
@@ -142,7 +141,7 @@ public class HsWebServices {
       try {
         sEnd = Long.parseLong(startedEnd);
       } catch (NumberFormatException e) {
-        throw new BadRequestException(e.getMessage());
+        throw new BadRequestException("Invalid number format: " + e.getMessage());
       }
       if (sEnd < 0) {
         throw new BadRequestException("startedTimeEnd must be greater than 0");
@@ -158,10 +157,10 @@ public class HsWebServices {
       try {
         fBegin = Long.parseLong(finishBegin);
       } catch (NumberFormatException e) {
-        throw new BadRequestException(e.getMessage());
+        throw new BadRequestException("Invalid number format: " + e.getMessage());
       }
       if (fBegin < 0) {
-        throw new BadRequestException("finishTimeBegin must be greater than 0");
+        throw new BadRequestException("finishedTimeBegin must be greater than 0");
       }
     }
     if (finishEnd != null && !finishEnd.isEmpty()) {
@@ -169,15 +168,15 @@ public class HsWebServices {
       try {
         fEnd = Long.parseLong(finishEnd);
       } catch (NumberFormatException e) {
-        throw new BadRequestException(e.getMessage());
+        throw new BadRequestException("Invalid number format: " + e.getMessage());
       }
       if (fEnd < 0) {
-        throw new BadRequestException("finishTimeEnd must be greater than 0");
+        throw new BadRequestException("finishedTimeEnd must be greater than 0");
       }
     }
     if (fBegin > fEnd) {
       throw new BadRequestException(
-          "finishTimeEnd must be greater than finishTimeBegin");
+          "finishedTimeEnd must be greater than finishedTimeBegin");
     }
 
     for (Job job : appCtx.getAllJobs().values()) {
@@ -200,7 +199,7 @@ public class HsWebServices {
       }
 
       if (userQuery != null && !userQuery.isEmpty()) {
-        if (!jobInfo.getName().equals(userQuery)) {
+        if (!jobInfo.getUserName().equals(userQuery)) {
           continue;
         }
       }
@@ -224,29 +223,17 @@ public class HsWebServices {
   @Path("/mapreduce/jobs/{jobid}")
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public JobInfo getJob(@PathParam("jobid") String jid) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
     return new JobInfo(job);
   }
 
   @GET
-  @Path("/mapreduce/jobs/{jobid}/attempts")
+  @Path("/mapreduce/jobs/{jobid}/jobattempts")
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
     AMAttemptsInfo amAttempts = new AMAttemptsInfo();
     for (AMInfo amInfo : job.getAMInfos()) {
       AMAttemptInfo attempt = new AMAttemptInfo(amInfo, MRApps.toString(job
@@ -261,53 +248,17 @@ public class HsWebServices {
   @Path("/mapreduce/jobs/{jobid}/counters")
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public JobCounterInfo getJobCounters(@PathParam("jobid") String jid) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    return new JobCounterInfo(this.appCtx, job);
-  }
 
-  @GET
-  @Path("/mapreduce/jobs/{jobid}/tasks/{taskid}/counters")
-  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
-  public JobTaskCounterInfo getSingleTaskCounters(
-      @PathParam("jobid") String jid, @PathParam("taskid") String tid) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = this.appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    TaskId taskID = MRApps.toTaskID(tid);
-    if (taskID == null) {
-      throw new NotFoundException("taskid " + tid + " not found or invalid");
-    }
-    Task task = job.getTask(taskID);
-    if (task == null) {
-      throw new NotFoundException("task not found with id " + tid);
-    }
-    return new JobTaskCounterInfo(task);
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    return new JobCounterInfo(this.appCtx, job);
   }
 
   @GET
   @Path("/mapreduce/jobs/{jobid}/conf")
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public ConfInfo getJobConf(@PathParam("jobid") String jid) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
     ConfInfo info;
     try {
       info = new ConfInfo(job, this.conf);
@@ -315,7 +266,6 @@ public class HsWebServices {
       throw new NotFoundException("unable to load configuration for job: "
           + jid);
     }
-
     return info;
   }
 
@@ -324,10 +274,8 @@ public class HsWebServices {
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public TasksInfo getJobTasks(@PathParam("jobid") String jid,
       @QueryParam("type") String type) {
-    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
     TasksInfo allTasks = new TasksInfo();
     for (Task task : job.getTasks().values()) {
       TaskType ttype = null;
@@ -351,10 +299,20 @@ public class HsWebServices {
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public TaskInfo getJobTask(@PathParam("jobid") String jid,
       @PathParam("taskid") String tid) {
-    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
+    return new TaskInfo(task);
+
+  }
+
+  @GET
+  @Path("/mapreduce/jobs/{jobid}/tasks/{taskid}/counters")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public JobTaskCounterInfo getSingleTaskCounters(
+      @PathParam("jobid") String jid, @PathParam("taskid") String tid) {
+
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
     TaskId taskID = MRApps.toTaskID(tid);
     if (taskID == null) {
       throw new NotFoundException("taskid " + tid + " not found or invalid");
@@ -363,8 +321,7 @@ public class HsWebServices {
     if (task == null) {
       throw new NotFoundException("task not found with id " + tid);
     }
-    return new TaskInfo(task);
-
+    return new JobTaskCounterInfo(task);
   }
 
   @GET
@@ -372,19 +329,10 @@ public class HsWebServices {
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public TaskAttemptsInfo getJobTaskAttempts(@PathParam("jobid") String jid,
       @PathParam("taskid") String tid) {
+
     TaskAttemptsInfo attempts = new TaskAttemptsInfo();
-    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    TaskId taskID = MRApps.toTaskID(tid);
-    if (taskID == null) {
-      throw new NotFoundException("taskid " + tid + " not found or invalid");
-    }
-    Task task = job.getTask(taskID);
-    if (task == null) {
-      throw new NotFoundException("task not found with id " + tid);
-    }
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
     for (TaskAttempt ta : task.getAttempts().values()) {
       if (ta != null) {
         if (task.getType() == TaskType.REDUCE) {
@@ -402,28 +350,11 @@ public class HsWebServices {
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public TaskAttemptInfo getJobTaskAttemptId(@PathParam("jobid") String jid,
       @PathParam("taskid") String tid, @PathParam("attemptid") String attId) {
-    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    TaskId taskID = MRApps.toTaskID(tid);
-    if (taskID == null) {
-      throw new NotFoundException("taskid " + tid + " not found or invalid");
-    }
-    Task task = job.getTask(taskID);
-    if (task == null) {
-      throw new NotFoundException("task not found with id " + tid);
-    }
-    TaskAttemptId attemptId = MRApps.toTaskAttemptID(attId);
-    if (attemptId == null) {
-      throw new NotFoundException("task attempt id " + attId
-          + " not found or invalid");
-    }
-    TaskAttempt ta = task.getAttempt(attemptId);
-    if (ta == null) {
-      throw new NotFoundException("Error getting info on task attempt id "
-          + attId);
-    }
+
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
+    TaskAttempt ta = AMWebServices.getTaskAttemptFromTaskAttemptString(attId,
+        task);
     if (task.getType() == TaskType.REDUCE) {
       return new ReduceTaskAttemptInfo(ta, task.getType());
     } else {
@@ -437,32 +368,11 @@ public class HsWebServices {
   public JobTaskAttemptCounterInfo getJobTaskAttemptIdCounters(
       @PathParam("jobid") String jid, @PathParam("taskid") String tid,
       @PathParam("attemptid") String attId) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = this.appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    TaskId taskID = MRApps.toTaskID(tid);
-    if (taskID == null) {
-      throw new NotFoundException("taskid " + tid + " not found or invalid");
-    }
-    Task task = job.getTask(taskID);
-    if (task == null) {
-      throw new NotFoundException("task not found with id " + tid);
-    }
-    TaskAttemptId attemptId = MRApps.toTaskAttemptID(attId);
-    if (attemptId == null) {
-      throw new NotFoundException("task attempt id " + attId
-          + " not found or invalid");
-    }
-    TaskAttempt ta = task.getAttempt(attemptId);
-    if (ta == null) {
-      throw new NotFoundException("Error getting info on task attempt id "
-          + attId);
-    }
+
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
+    TaskAttempt ta = AMWebServices.getTaskAttemptFromTaskAttemptString(attId,
+        task);
     return new JobTaskAttemptCounterInfo(ta);
   }
 

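Beyond the clearer BadRequestException messages, the corrected user filter (getUserName instead of getName), and the /attempts -> /jobattempts rename, the bulk of this diff collapses the repeated jobid/taskid/attemptid validation into shared static helpers on AMWebServices. The shape those calls imply, reconstructed from the blocks they replace (the committed helper lives in AMWebServices and may differ in detail):

    public static Job getJobFromJobIdString(String jid, AppContext appCtx)
        throws NotFoundException {
      JobId jobId = MRApps.toJobID(jid);
      if (jobId == null) {
        throw new NotFoundException("job, " + jid + ", is not found");
      }
      Job job = appCtx.getJob(jobId);
      if (job == null) {
        throw new NotFoundException("job, " + jid + ", is not found");
      }
      return job;
    }
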
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/JAXBContextResolver.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/JAXBContextResolver.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/JAXBContextResolver.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/JAXBContextResolver.java Mon Jan 16 04:24:24 2012
@@ -42,11 +42,14 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskCounterGroupInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskCounterInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TasksInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
+
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptsInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.HistoryInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
+import org.apache.hadoop.yarn.webapp.RemoteExceptionData;
 
 @Singleton
 @Provider
@@ -57,13 +60,13 @@ public class JAXBContextResolver impleme
 
   // you have to specify all the dao classes here
   private final Class[] cTypes = { HistoryInfo.class, JobInfo.class,
-      JobsInfo.class, TasksInfo.class, TaskAttemptsInfo.class, ConfInfo.class,
-      CounterInfo.class, JobTaskCounterInfo.class,
-      JobTaskAttemptCounterInfo.class, 
-      TaskCounterInfo.class, JobCounterInfo.class, ReduceTaskAttemptInfo.class,
-      TaskAttemptInfo.class, TaskAttemptsInfo.class, CounterGroupInfo.class,
-      TaskCounterGroupInfo.class, 
-      AMAttemptInfo.class, AMAttemptsInfo.class};
+      JobsInfo.class, TaskInfo.class, TasksInfo.class, TaskAttemptsInfo.class,
+      ConfInfo.class, CounterInfo.class, JobTaskCounterInfo.class,
+      JobTaskAttemptCounterInfo.class, TaskCounterInfo.class,
+      JobCounterInfo.class, ReduceTaskAttemptInfo.class, TaskAttemptInfo.class,
+      TaskAttemptsInfo.class, CounterGroupInfo.class,
+      TaskCounterGroupInfo.class, AMAttemptInfo.class, AMAttemptsInfo.class,
+      RemoteExceptionData.class };
 
   public JAXBContextResolver() throws Exception {
     this.types = new HashSet<Class>(Arrays.asList(cTypes));

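The resolver's cTypes array has to enumerate every DAO the history web services can return; this hunk adds the TaskInfo DAO (now reachable via the refactored task endpoint) and RemoteExceptionData (the payload for marshalled errors). For context, a resolver like this typically hands Jersey the shared context only for registered types, along the lines of the following sketch (assuming fields named types and context, as the constructor above suggests; not the committed method):

    @Override
    public JAXBContext getContext(Class<?> type) {
      // null tells Jersey to fall back to its default context for this type
      return types.contains(type) ? context : null;
    }
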
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java Mon Jan 16 04:24:24 2012
@@ -26,10 +26,11 @@ import javax.xml.bind.annotation.XmlRoot
 import javax.xml.bind.annotation.XmlTransient;
 
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
+import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.util.BuilderUtils;
 
-@XmlRootElement(name = "amAttempt")
+@XmlRootElement(name = "jobAttempt")
 @XmlAccessorType(XmlAccessType.FIELD)
 public class AMAttemptInfo {
 
@@ -48,21 +49,30 @@ public class AMAttemptInfo {
 
   public AMAttemptInfo(AMInfo amInfo, String jobId, String user, String host,
       String pathPrefix) {
-    this.nodeHttpAddress = amInfo.getNodeManagerHost() + ":"
-        + amInfo.getNodeManagerHttpPort();
-    NodeId nodeId = BuilderUtils.newNodeId(amInfo.getNodeManagerHost(),
-        amInfo.getNodeManagerPort());
-    this.nodeId = nodeId.toString();
+    this.nodeHttpAddress = "";
+    this.nodeId = "";
+    String nmHost = amInfo.getNodeManagerHost();
+    int nmHttpPort = amInfo.getNodeManagerHttpPort();
+    int nmPort = amInfo.getNodeManagerPort();
+    if (nmHost != null) {
+      this.nodeHttpAddress = nmHost + ":" + nmHttpPort;
+      NodeId nodeId = BuilderUtils.newNodeId(nmHost, nmPort);
+      this.nodeId = nodeId.toString();
+    }
+
     this.id = amInfo.getAppAttemptId().getAttemptId();
     this.startTime = amInfo.getStartTime();
-    this.containerId = amInfo.getContainerId().toString();
-    this.logsLink = join(
-        host,
-        pathPrefix,
-        ujoin("logs", nodeId.toString(), amInfo.getContainerId().toString(),
-            jobId, user));
-    this.shortLogsLink = ujoin("logs", nodeId.toString(), amInfo
-        .getContainerId().toString(), jobId, user);
+    this.containerId = "";
+    this.logsLink = "";
+    this.shortLogsLink = "";
+    ContainerId containerId = amInfo.getContainerId();
+    if (containerId != null) {
+      this.containerId = containerId.toString();
+      this.logsLink = join(host, pathPrefix,
+          ujoin("logs", this.nodeId, this.containerId, jobId, user));
+      this.shortLogsLink = ujoin("logs", this.nodeId, this.containerId,
+          jobId, user);
+    }
   }
 
   public String getNodeHttpAddress() {

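The AMAttemptInfo constructor now defaults nodeHttpAddress, nodeId, containerId, logsLink and shortLogsLink to empty strings and only fills them when the AMInfo actually carries a NodeManager host or ContainerId, so an attempt recorded before the AM registered still marshals without an NPE; the root element also changes from amAttempt to jobAttempt to match the renamed /jobattempts endpoint. The same null guard, factored as a tiny helper (illustrative only):

    private static String hostAndPort(String host, int port) {
      return host == null ? "" : host + ":" + port;
    }
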

