kafka-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jkr...@apache.org
Subject [26/30] git commit: DumpLogSegment offset verification is incorrect for compressed messages (second fix); patched by Yang Ye; reviewed by Jun Rao; KAFKA-614
Date Tue, 18 Dec 2012 17:44:12 GMT
DumpLogSegment offset verification is incorrect for compressed messages (second fix); patched
by Yang Ye; reviewed by Jun Rao; KAFKA-614

git-svn-id: https://svn.apache.org/repos/asf/kafka/branches/0.8@1415004 13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/kafka/repo
Commit: http://git-wip-us.apache.org/repos/asf/kafka/commit/e5560635
Tree: http://git-wip-us.apache.org/repos/asf/kafka/tree/e5560635
Diff: http://git-wip-us.apache.org/repos/asf/kafka/diff/e5560635

Branch: refs/heads/trunk
Commit: e556063520193c8065d7f8e39153d23dae8cf8b8
Parents: 7b2d9c3
Author: Jun Rao <junrao@apache.org>
Authored: Thu Nov 29 00:09:31 2012 +0000
Committer: Jun Rao <junrao@apache.org>
Committed: Thu Nov 29 00:09:31 2012 +0000

----------------------------------------------------------------------
 .../main/scala/kafka/tools/DumpLogSegments.scala   |   16 +++++++-------
 1 file changed, 8 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kafka/blob/e5560635/core/src/main/scala/kafka/tools/DumpLogSegments.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/DumpLogSegments.scala b/core/src/main/scala/kafka/tools/DumpLogSegments.scala
index 167cf10..31333e7 100644
--- a/core/src/main/scala/kafka/tools/DumpLogSegments.scala
+++ b/core/src/main/scala/kafka/tools/DumpLogSegments.scala
@@ -57,8 +57,8 @@ object DumpLogSegments {
     val verifyOnly = if(options.has(verifyOpt)) true else false
     val files = options.valueOf(filesOpt).split(",")
 
-    val misMatchesForIndexFilesMap = new mutable.HashMap[String, List[(Int, Int)]]
-    val nonConsecutivePairsForLogFilesMap = new mutable.HashMap[String, List[(Int, Int)]]
+    val misMatchesForIndexFilesMap = new mutable.HashMap[String, List[(Long, Long)]]
+    val nonConsecutivePairsForLogFilesMap = new mutable.HashMap[String, List[(Long, Long)]]
 
     for(arg <- files) {
       val file = new File(arg)
@@ -89,7 +89,7 @@ object DumpLogSegments {
   }
   
   /* print out the contents of the index */
-  private def dumpIndex(file: File, verifyOnly: Boolean, misMatchesForIndexFilesMap: mutable.HashMap[String,
List[(Int, Int)]]) {
+  private def dumpIndex(file: File, verifyOnly: Boolean, misMatchesForIndexFilesMap: mutable.HashMap[String,
List[(Long, Long)]]) {
     val startOffset = file.getName().split("\\.")(0).toLong
     val logFileName = file.getAbsolutePath.split("\\.")(0) + Log.LogFileSuffix
     val logFile = new File(logFileName)
@@ -100,8 +100,8 @@ object DumpLogSegments {
       val partialFileMessageSet: FileMessageSet = messageSet.read(entry.position, messageSet.sizeInBytes())
       val messageAndOffset = partialFileMessageSet.head
       if(messageAndOffset.offset != entry.offset + index.baseOffset) {
-        var misMatchesSeq = misMatchesForIndexFilesMap.getOrElse(file.getName, List[(Int,
Int)]())
-        misMatchesSeq ::=((entry.offset + index.baseOffset, messageAndOffset.offset).asInstanceOf[(Int,
Int)])
+        var misMatchesSeq = misMatchesForIndexFilesMap.getOrElse(file.getName, List[(Long,
Long)]())
+        misMatchesSeq ::=(entry.offset + index.baseOffset, messageAndOffset.offset)
         misMatchesForIndexFilesMap.put(file.getName, misMatchesSeq)
       }
       // since it is a sparse file, in the event of a crash there may be many zero entries,
stop if we see one
@@ -113,7 +113,7 @@ object DumpLogSegments {
   }
   
   /* print out the contents of the log */
-  private def dumpLog(file: File, printContents: Boolean, nonConsecutivePairsForLogFilesMap:
mutable.HashMap[String, List[(Int, Int)]]) {
+  private def dumpLog(file: File, printContents: Boolean, nonConsecutivePairsForLogFilesMap:
mutable.HashMap[String, List[(Long, Long)]]) {
     val startOffset = file.getName().split("\\.")(0).toLong
     println("Starting offset: " + startOffset)
     val messageSet = new FileMessageSet(file)
@@ -126,8 +126,8 @@ object DumpLogSegments {
         lastOffset = messageAndOffset.offset
       // If it's uncompressed message, its offset must be lastOffset + 1 no matter last message
is compressed or uncompressed
       else if (msg.compressionCodec == NoCompressionCodec && messageAndOffset.offset
!= lastOffset +1) {
-        var nonConsecutivePairsSeq = nonConsecutivePairsForLogFilesMap.getOrElse(file.getName,
List[(Int, Int)]())
-        nonConsecutivePairsSeq ::=((lastOffset, messageAndOffset.offset).asInstanceOf[(Int,
Int)])
+        var nonConsecutivePairsSeq = nonConsecutivePairsForLogFilesMap.getOrElse(file.getName,
List[(Long, Long)]())
+        nonConsecutivePairsSeq ::=(lastOffset, messageAndOffset.offset)
         nonConsecutivePairsForLogFilesMap.put(file.getName, nonConsecutivePairsSeq)
       }
       lastOffset = messageAndOffset.offset


Mime
View raw message