jmeter-commits mailing list archives

From Felix Schumacher <felix.schumac...@internetallee.de>
Subject Re: svn commit: r1719558 - /jmeter/trunk/src/core/org/apache/jmeter/report/processor/ExternalSampleSorter.java
Date Fri, 11 Dec 2015 21:24:16 GMT
Am 11.12.2015 um 22:01 schrieb pmouawad@apache.org:
> Author: pmouawad
> Date: Fri Dec 11 21:01:25 2015
> New Revision: 1719558
>
> URL: http://svn.apache.org/viewvc?rev=1719558&view=rev
> Log:
> Use Java7 try with resources
> Close stream leak
> Remove commented code
> Fix synchro issue in comparison
>
> Modified:
>      jmeter/trunk/src/core/org/apache/jmeter/report/processor/ExternalSampleSorter.java
>
> Modified: jmeter/trunk/src/core/org/apache/jmeter/report/processor/ExternalSampleSorter.java
> URL: http://svn.apache.org/viewvc/jmeter/trunk/src/core/org/apache/jmeter/report/processor/ExternalSampleSorter.java?rev=1719558&r1=1719557&r2=1719558&view=diff
> ==============================================================================
> --- jmeter/trunk/src/core/org/apache/jmeter/report/processor/ExternalSampleSorter.java (original)
> +++ jmeter/trunk/src/core/org/apache/jmeter/report/processor/ExternalSampleSorter.java Fri Dec 11 21:01:25 2015
> @@ -38,8 +38,6 @@ import org.apache.jmeter.report.core.Sam
>   import org.apache.jmeter.report.core.SampleComparator;
>   import org.apache.jmeter.report.core.SampleException;
>   import org.apache.jmeter.report.core.SampleMetadata;
> -import org.apache.jmeter.report.processor.AbstractSampleConsumer;
> -import org.apache.jmeter.report.processor.SampleProducer;
>   import org.slf4j.Logger;
>   import org.slf4j.LoggerFactory;
>   
> @@ -198,13 +196,11 @@ public class ExternalSampleSorter extend
>                       outputFile.getAbsolutePath()
>                               + " is a directory. Please provide a valid output sample file path (not a directory)");
>           }
> -        CsvSampleReader csvReader = new CsvSampleReader(inputFile,
> -                inputFile.getSeparator(), false);
> -        try {
> +
> +        try (CsvSampleReader csvReader = new CsvSampleReader(inputFile,
> +                inputFile.getSeparator(), false)){
>               sort(csvReader, outputFile, writeHeader);
> -        } finally {
> -            csvReader.close();
> -        }
> +        }
>       }
>   
>       /**
> @@ -246,12 +242,9 @@ public class ExternalSampleSorter extend
>                       outputFile.getAbsolutePath()
>                               + " is a directory. Please provide a valid output sample file path (not a directory)");
>           }
> -        CsvSampleReader csvReader = new CsvSampleReader(inputFile,
> -                sampleMetadata);
> -        try {
> +        try (CsvSampleReader csvReader = new CsvSampleReader(inputFile,
> +                sampleMetadata)){
>               sort(csvReader, outputFile, writeHeader);
> -        } finally {
> -            csvReader.close();
>           }
>       }
>   
> @@ -304,7 +297,7 @@ public class ExternalSampleSorter extend
>           File workDir = getWorkingDirectory();
>           workDir.mkdir();
>           this.pool.prestartAllCoreThreads();
> -        inputSampleCount.set(0);;
> +        inputSampleCount.set(0);
>           chunkedSampleCount.set(0);
>           chunks = new LinkedList<>();
>           samples = new LinkedList<>();
> @@ -331,7 +324,7 @@ public class ExternalSampleSorter extend
>               log.debug("sort(): " + inputSampleCount.longValue()
>                       + " samples read from input, " + chunkedSampleCount.longValue()
>                       + " samples written to chunk files");
> -            if (inputSampleCount.get() != chunkedSampleCount.get()) {
> +            if (!inputSampleCount.equals(chunkedSampleCount)) {
AtomicLong does not override equals, so it may not behave the way you
might expect here: it falls back to the default Object#equals, which
compares references, not values.

Thus

(new AtomicLong(0)).equals(new AtomicLong(0)) == false !

So I think the previous version (comparing the get() values) was correct.
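
For illustration, here is a minimal stand-alone check (the QuickCheck
class below is just a throwaway example, not part of JMeter) showing the
difference between equals() and comparing the wrapped values:

    import java.util.concurrent.atomic.AtomicLong;

    public class QuickCheck {
        public static void main(String[] args) {
            AtomicLong a = new AtomicLong(0);
            AtomicLong b = new AtomicLong(0);
            // AtomicLong inherits Object#equals, so this compares references
            System.out.println(a.equals(b));        // prints false
            // Comparing the underlying long values behaves as intended
            System.out.println(a.get() == b.get()); // prints true
        }
    }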

Regards,
  Felix
>                   log.error("Failure! Number of samples read from input and written to chunk files differ");
>               } else {
>                   log.info("chunked samples dumps succeeded.");
> @@ -371,14 +364,11 @@ public class ExternalSampleSorter extend
>               log.debug("sortAndDump(): Dumping chunk " + out);
>               start = System.currentTimeMillis();
>           }
> -        CsvSampleWriter csvWriter = new CsvSampleWriter(out, sampleMetadata);
> -        try {
> +        try (CsvSampleWriter csvWriter = new CsvSampleWriter(out, sampleMetadata)){
>               for (Sample sample : sortedSamples) {
>                   csvWriter.write(sample);
>                   chunkedSampleCount.incrementAndGet();
>               }
> -        } finally {
> -            csvWriter.close();
>           }
>           if (log.isDebugEnabled()) {
>               stop = System.currentTimeMillis();
> @@ -509,29 +499,15 @@ public class ExternalSampleSorter extend
>               mergeFiles(metadata, leftFile, rightFile, out);
>           } else {
>               File f = chunks.get(0);
> -            CsvSampleReader reader = new CsvSampleReader(f, metadata);
> -            Sample s = null;
> -            while ((s = reader.readSample()) != null) {
> -                out.produce(s, 0);
> +            try (CsvSampleReader reader = new CsvSampleReader(f, metadata)) {
> +        	Sample s = null;
> +        	while ((s = reader.readSample()) != null) {
> +        	    out.produce(s, 0);
> +        	}
>               }
>           }
>       }
>   
> -    // private static long countSamples(File f, SampleMetadata metadata) {
> -    // long out = 0;
> -    // CsvSampleReader reader = null;
> -    //
> -    // if (metadata != null) {
> -    // reader = new CsvSampleReader(f, metadata);
> -    // } else {
> -    // reader = new CsvSampleReader(f, ';');
> -    // }
> -    // while (reader.readSample() != null) {
> -    // out++;
> -    // }
> -    // return out;
> -    // }
> -
>       private File mergeSortFiles(List<File> chunks, SampleMetadata metadata) {
>           int sz = chunks.size();
>           if (sz == 1) {
> @@ -556,10 +532,10 @@ public class ExternalSampleSorter extend
>           if (out == null) {
>               out = getChunkFile();
>           }
> -        CsvSampleWriter csvWriter = new CsvSampleWriter(out, metadata);
> -        CsvSampleReader l = new CsvSampleReader(left, metadata);
> -        CsvSampleReader r = new CsvSampleReader(right, metadata);
> -        try {
> +
> +        try (CsvSampleWriter csvWriter = new CsvSampleWriter(out, metadata);
> +        	CsvSampleReader l = new CsvSampleReader(left, metadata);
> +                CsvSampleReader r = new CsvSampleReader(right, metadata)) {
>               if (writeHeader) {
>                   csvWriter.writeHeader();
>               }
> @@ -583,18 +559,13 @@ public class ExternalSampleSorter extend
>                       csvWriter.write(r.readSample());
>                   }
>               }
> -        } finally {
> -            csvWriter.close();
> -            l.close();
> -            r.close();
>           }
>       }
>   
>       private void mergeFiles(SampleMetadata metadata, File left, File right,
>               SampleProducer out) {
> -        CsvSampleReader l = new CsvSampleReader(left, metadata);
> -        CsvSampleReader r = new CsvSampleReader(right, metadata);
> -        try {
> +        try (CsvSampleReader l = new CsvSampleReader(left, metadata);
> +        	CsvSampleReader r = new CsvSampleReader(right, metadata)){
>               while (l.hasNext() || r.hasNext()) {
>                   if (l.hasNext() && r.hasNext()) {
>                       Sample firstLeft = l.peek();
> @@ -615,9 +586,6 @@ public class ExternalSampleSorter extend
>                       out.produce(r.readSample(), 0);
>                   }
>               }
> -        } finally {
> -            l.close();
> -            r.close();
>           }
>       }
>   
> @@ -645,35 +613,4 @@ public class ExternalSampleSorter extend
>       public final void setRevertedSort(boolean revertedSort) {
>           this.revertedSort = revertedSort;
>       }
> -
> -    // private static void test(String wd, String in, String out) {
> -    // File workDir = new File(wd);
> -    //
> -    // CsvFile input = new CsvFile(in, ';');
> -    // File output = new File(out);
> -    //
> -    // ElapsedSampleComparator comparator = new ElapsedSampleComparator();
> -    // ExternalSampleSorter sorter = new ExternalSampleSorter();
> -    // sorter.setWorkDir(workDir);
> -    // sorter.setChunkSize(800000);
> -    // sorter.setSampleComparator(comparator);
> -    // sorter.setParallelize(true);
> -    // for (int i = 0; i < 1; i++) {
> -    // long start = System.currentTimeMillis();
> -    // sorter.sort(input, output, true);
> -    // long stop = System.currentTimeMillis();
> -    // log.info((stop - start) / 1000f / 60f + " m");
> -    // log.debug("Checking output sample count...");
> -    // long sampleCount = countSamples(output, null);
> -    // log.debug("Counted " + sampleCount +
> -    // " samples in generated sorted file : output=" + output.length() +
> -    // " bytes, input=" + input.length() + " bytes");
> -    // if (input.length() != output.length()) {
> -    // log.error("Sort failed ! sizes differ.");
> -    // } else {
> -    // log.info("sort success !");
> -    // }
> -    // }
> -    // }
> -
>   }
>
>

