[med-svn] [Git][med-team/pixelmed-codec][master] 7 commits: d/control: Update Homepage to direct location

Mathieu Malaterre gitlab at salsa.debian.org
Fri Jul 31 09:37:51 BST 2020



Mathieu Malaterre pushed to branch master at Debian Med / pixelmed-codec


Commits:
6c2be3cb by Mathieu Malaterre at 2020-07-31T09:09:42+02:00
d/control: Update Homepage to direct location

- - - - -
6c6af782 by Mathieu Malaterre at 2020-07-31T09:14:49+02:00
d/copyright: Update Source URL

- - - - -
27ad939d by Mathieu Malaterre at 2020-07-31T09:24:54+02:00
New upstream version 20200328
- - - - -
6d8faa6a by Mathieu Malaterre at 2020-07-31T09:24:54+02:00
Update upstream source from tag 'upstream/20200328'

Update to upstream version '20200328'
with Debian dir ce3e3fd6b72645ca51ec1cd72b1d87173162eaea
- - - - -
929a1546 by Mathieu Malaterre at 2020-07-31T10:28:02+02:00
d/control: Bump Std-Vers to 4.5.0; no changes needed

- - - - -
9e0ec63e by Mathieu Malaterre at 2020-07-31T10:35:02+02:00
d/rules: Remove BUILDDATE file

- - - - -
ea1c3ab5 by Mathieu Malaterre at 2020-07-31T10:36:26+02:00
d/changelog: Upload latest release

- - - - -


13 changed files:

- COPYRIGHT
- + com/pixelmed/codec/jpeg/EntropyCodedSegment copy.java
- com/pixelmed/codec/jpeg/EntropyCodedSegment.java
- com/pixelmed/codec/jpeg/HuffmanTable.java
- com/pixelmed/codec/jpeg/Makefile
- com/pixelmed/codec/jpeg/Markers.java
- com/pixelmed/codec/jpeg/Parse.java
- com/pixelmed/codec/jpeg/package.html
- com/pixelmed/imageio/Makefile
- debian/changelog
- debian/control
- debian/copyright
- debian/rules


Changes:

=====================================
COPYRIGHT
=====================================
@@ -1,4 +1,4 @@
-Copyright (c) 2014, David A. Clunie DBA PixelMed Publishing. All rights reserved.
+Copyright (c) 2014-2017, David A. Clunie DBA PixelMed Publishing. All rights reserved.
 
 Redistribution and use in source and binary forms, with or without modification, are
 permitted provided that the following conditions are met:


=====================================
com/pixelmed/codec/jpeg/EntropyCodedSegment copy.java
=====================================
@@ -0,0 +1,684 @@
+/* Copyright (c) 2014-2015, David A. Clunie DBA Pixelmed Publishing. All rights reserved. */
+
+package com.pixelmed.codec.jpeg;
+
+import java.awt.Rectangle;
+import java.awt.Shape;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Vector;
+
+/**
+ * <p>A JPEG Entropy Coded Segment.</p>
+ *
+ * <p>Development of this class was supported by funding from MDDX Research and Informatics.</p>
+ *
+ * @author	dclunie
+ */
+public class EntropyCodedSegment {
+
+	private static final String identString = "@(#) $Header: /userland/cvs/codec/com/pixelmed/codec/jpeg/EntropyCodedSegment.java,v 1.24 2016/01/16 13:30:09 dclunie Exp $";
+
+	private boolean copying;
+	private boolean decompressing;
+
+	private OutputArrayOrStream[] decompressedOutputPerComponent;
+
+	private boolean isHuffman;
+	private boolean isDCT;
+	private boolean isLossless;
+
+	private ByteArrayOutputStream copiedBytes;
+		
+ 	private final MarkerSegmentSOS sos;
+ 	private final MarkerSegmentSOF sof;
+ 	private final Map<String,HuffmanTable> htByClassAndIdentifer;
+ 	private final Map<String,QuantizationTable> qtByIdentifer;
+
+ 	private final int nComponents;
+ 	private final int[] DCEntropyCodingTableSelector;
+ 	private final int[] ACEntropyCodingTableSelector;
+ 	private final int[] HorizontalSamplingFactor;
+ 	private final int[] VerticalSamplingFactor;
+		
+ 	private final int maxHorizontalSamplingFactor;
+ 	private final int maxVerticalSamplingFactor;
+	
+	private final int nMCUHorizontally;
+	
+	private final Vector<Shape> redactionShapes;
+
+	// stuff for lossless decompression ...
+	private final int predictorForFirstSample;
+ 	private final int[] predictorForComponent;
+	private final int predictorSelectionValue;
+
+	// these are class level and used by getOneLosslessValue() to maintain state (updates them) and initialized by constructor
+ 	private int[] rowNumberAtBeginningOfRestartInterval;	// indexed by component number, not final since set at beginning of each
+ 	private final int[] rowLength;							// indexed by component number
+ 	private final int[] currentRowNumber;					// indexed by component number
+ 	private final int[] positionWithinRow;					// indexed by component number
+ 	private final int[][] previousReconstructedRow;			// indexed by component number, positionWithinRow
+ 	private final int[][] currentReconstructedRow;			// indexed by component number, positionWithinRow
+
+	// stuff for bit extraction ...
+	// copied from com.pixelmed.scpecg.HuffmanDecoder ...
+	private byte[] bytesToDecompress;
+	private int availableBytes;
+	private int byteIndex;
+	private int bitIndex;
+	private int currentByte;
+	private int currentBits;
+	private int haveBits;
+
+	private static final int[] extractBitFromByteMask = { 0x80,0x40,0x20,0x10,0x08,0x04,0x02,0x01 };
+	
+	private final void getEnoughBits(int wantBits) throws Exception {
+		while (haveBits < wantBits) {
+			if (bitIndex > 7) {
+				if (byteIndex < availableBytes) {
+					currentByte=bytesToDecompress[byteIndex++];
+//System.err.println("currentByte["+byteIndex+"] now = 0x"+Integer.toHexString(currentByte&0xff)+" "+Integer.toBinaryString(currentByte&0xff));
+					bitIndex=0;
+				}
+				else {
+					throw new Exception("No more bits (having decompressed "+byteIndex+" dec bytes)");
+				}
+			}
+			int newBit = (currentByte & extractBitFromByteMask[bitIndex++]) == 0 ? 0 : 1;
+			currentBits = (currentBits << 1) + newBit;
+			++haveBits;
+		}
+//System.err.println("getEnoughBits(): returning "+haveBits+" bits "+Integer.toBinaryString(currentBits)+" (ending at byte "+byteIndex+" bit "+(bitIndex-1)+")");
+	}
+	
+	private int writeByte;		// only contains meaningful content when writeBitIndex > 0
+	private int writeBitIndex;	// 0 means ready to write 1st (high) bit to writeByte, 7 means ready to write last (low) bit to writeByte, will transiently (inside writeBits only) be 8 to signal new byte needed
+	
+	private final void initializeWriteBits() {
+		copiedBytes = new ByteArrayOutputStream();
+		writeByte = 0;
+		writeBitIndex = 0;	// start writing into 1st (high) bit of writeByte
+	}
+	
+	private final void flushWriteBits() {
+		if (writeBitIndex > 0) {
+			// bits have been written to writeByte so need to pad it with 1s and write it
+			while (writeBitIndex < 8) {
+				writeByte = writeByte | extractBitFromByteMask[writeBitIndex];
+				++writeBitIndex;
+			}
+			copiedBytes.write(writeByte);
+			if ((writeByte&0xff) == 0xff) {
+				copiedBytes.write(0);	// stuffed zero byte after 0xff to prevent being considered marker
+			}
+			writeByte=0;
+			writeBitIndex=0;
+		}
+		// else have not written any bits to writeByte, so do nothing
+	}
+	
+	private final void writeBits(int bits,int nBits) {
+//System.err.println("writeBits(): writing "+nBits+" bits "+Integer.toBinaryString(bits));
+		if (nBits > 0) {
+			for (int i=nBits-1; i>=0; --i) {
+				final int whichBitMask = 1 << i;			// bits are "big endian"
+				final int bitIsSet = bits & whichBitMask;	// zero or not zero
+				// do not need to check writeBitIndex before "writing" ... will always be "ready"
+				if (bitIsSet != 0) {
+					writeByte = writeByte | extractBitFromByteMask[writeBitIndex];
+				}
+				++writeBitIndex;
+				if (writeBitIndex > 7) {
+//System.err.println("writeBits(): wrote = 0x"+Integer.toHexString(writeByte&0xff)+" "+Integer.toBinaryString(writeByte&0xff));
+					copiedBytes.write(writeByte);
+					if ((writeByte&0xff) == 0xff) {
+						copiedBytes.write(0);	// stuffed zero byte after 0xff to prevent being considered marker
+					}
+					writeByte=0;
+					writeBitIndex=0;
+				}
+			}
+		}
+	}
+
+
+	
+	private HuffmanTable usingTable = null;
+	
+//int counter = 0;
+	
+	// Use 10918-1 F.2 Figure F.16 decode procedure
+	
+	/**
+	 * <p>Decode a single value.</p>
+	 *
+	 * @return	the decoded value
+	 */
+	private final int decode()  throws Exception {
+		final int[] MINCODE = usingTable.getMINCODE();
+		final int[] MAXCODE = usingTable.getMAXCODE();
+		final int[] VALPTR  = usingTable.getVALPTR();
+		final int[] HUFFVAL = usingTable.getHUFFVAL();
+	
+		int I=1;
+		getEnoughBits(I);		// modifies currentBits
+		int CODE = currentBits;
+		while (I<MAXCODE.length && CODE > MAXCODE[I]) {
+		//while (CODE > MAXCODE[I]) {
+			++I;
+//System.err.println("I = "+I);
+			getEnoughBits(I);	// modifies currentBits
+			CODE = currentBits;
+//System.err.println("CODE "+Integer.toBinaryString(CODE));
+//System.err.println("compare to MAXCODE[I] "+(I<MAXCODE.length ? Integer.toBinaryString(MAXCODE[I]) : "out of MAXCODE entries"));
+		}
+//System.err.println("Decoded CODE "+Integer.toBinaryString(CODE)+" of length "+I);
+		int VALUE = 0;
+		if (I<MAXCODE.length) {
+			int J = VALPTR[I];
+//System.err.println("Found VALPTR base "+J);
+			J = J + CODE - MINCODE[I];
+//System.err.println("Found VALPTR offset by code "+J);
+			VALUE = HUFFVAL[J];
+//System.err.println("Found VALUE "+VALUE+" dec (0x"+Integer.toHexString(VALUE)+")");
+//System.err.println("HUFF_DECODE: "+VALUE+" COUNTER "+counter);
+//++counter;
+		}
+		else {
+			//we have exceeded the maximum coded value specified :(
+			// copy IJG behavior in this situation from jdhuff.c "With garbage input we may reach the sentinel value l = 17" ... "fake a zero as the safest result"
+//System.err.println("Bad Huffman code "+Integer.toBinaryString(CODE)+" so use VALUE "+VALUE+" dec (0x"+Integer.toHexString(VALUE)+")");
+		}
+		if (copying) { writeBits(currentBits,haveBits); }
+		currentBits=0;
+		haveBits=0;
+		return VALUE;
+	}
+
+	private final void encode(int VALUE) {
+//System.err.println("Given VALUE "+VALUE+" dec (0x"+Integer.toHexString(VALUE)+")");
+		final int[] EFUFCO = usingTable.getEFUFCO();
+		final int[] EFUFSI = usingTable.getEFUFSI();
+		int CODE = EFUFCO[VALUE];
+		int size = EFUFSI[VALUE];
+//System.err.println("Encoding CODE "+Integer.toBinaryString(CODE)+" of length "+size);
+		writeBits(CODE,size);
+	}
+	
+	private final int getValueOfRequestedLength(int wantBits) throws Exception {
+		getEnoughBits(wantBits);	// modifies currentBits
+		final int value = currentBits;
+//System.err.println("getValueOfRequestedLength(): wantBits="+wantBits+" : Got value "+value+" dec (0x"+Integer.toHexString(value)+")");
+		if (copying) { writeBits(currentBits,haveBits); }
+		currentBits=0;
+		haveBits=0;
+		return value;
+	}
+
+	// values above index 11 only occur for 12 bit process ...
+	private int[] dcSignBitMask = { 0x00/*na*/,0x01,0x02,0x04,0x08,0x10,0x20,0x40,0x80,0x100,0x200,0x400,0x800,0x1000,0x2000,0x4000 /*no entry for 16*/};
+	private int[] maxAmplitude  = { 0/*na*/,0x02-1,0x04-1,0x08-1,0x10-1,0x20-1,0x40-1,0x80-1,0x100-1,0x200-1,0x400-1,0x800-1,0x1000-1,0x2000-1,0x4000-1,0x8000-1 /*no entry for 16*/};
+
+	private final int convertSignAndAmplitudeBitsToValue(int value,int length) throws Exception {
+		// see P&M Table 11-1 page 190 and Table 11-4 page 193 (same for DC and AC)
+		if (length > 0) {
+//System.err.println("dcSignBitMask = "+Integer.toHexString(dcSignBitMask[length]));
+			if ((value & dcSignBitMask[length]) == 0) {
+//System.err.println("Have sign bit");
+				value = value - maxAmplitude[length];
+			}
+		}
+		return value;
+	}
+	
+	private final int getNumberOfSignBits(int value) {
+		int ssss = 0;
+		if (value < 0) {
+			value = - value;
+		}
+		while (value > 0) {
+			++ssss;
+			value = value >> 1;
+		}
+		return ssss;
+	}
+	
+	private final int getBits(int value,int ssss) {
+		int bits = 0;
+		if (ssss > 0) {
+			if (value < 0) {	// "if ... -ve, subtract 1 ... and append the SSSS low-order bits of this result" P&M p191
+				--value;
+			}
+			// else "if ... +ve, append the SSSS low-order bits" P&M p191
+			bits = value & maxAmplitude[ssss];
+		}
+		return bits;
+	}
+
+	
+	private final void writeEntropyCodedAllZeroACCoefficients() {
+		// write a single EOB code, which is rrrrssss = 0x00;
+		writeBits(usingTable.getEOBCode(),usingTable.getEOBCodeLength());
+	}
+	
+
+	/**
+	 * <p>Set up the environment to decode an EntropyCodedSegment to dump, redact or copy as required.</p>
+	 *
+	 * @param	sos								SOS marker segment contents
+	 * @param	sof								SOF marker segment contents
+	 * @param	htByClassAndIdentifer			Huffman tables
+	 * @param	qtByIdentifer					quantization tables
+	 * @param	nMCUHorizontally				the number of MCUs in a single row
+	 * @param	redactionShapes					a Vector of Shape that are Rectangle
+	 * @param	copying							true if copying
+	 * @param	dumping							true if dumping
+	 * @param	decompressing					true if decompressing
+	 * @param	decompressedOutput				the decompressed output (with specified or default endianness if precision > 8)
+	 * @throws Exception						if JPEG process not supported
+	 */
+	public EntropyCodedSegment(MarkerSegmentSOS sos,MarkerSegmentSOF sof,Map<String,HuffmanTable> htByClassAndIdentifer,Map<String,QuantizationTable> qtByIdentifer,int nMCUHorizontally,Vector<Shape> redactionShapes,boolean copying,boolean dumping,boolean decompressing,Parse.DecompressedOutput decompressedOutput) throws Exception {
+ 		this.sos = sos;
+ 		this.sof = sof;
+ 		this.htByClassAndIdentifer = htByClassAndIdentifer;
+ 		this.qtByIdentifer = qtByIdentifer;
+		this.nMCUHorizontally = nMCUHorizontally;
+		this.redactionShapes = redactionShapes;
+		this.copying = copying;
+		// dumping is not used other than in this constructor
+		this.decompressing = decompressing;
+		this.decompressedOutputPerComponent = decompressedOutput == null ? null : decompressedOutput.getDecompressedOutputPerComponent();
+		
+		this.isHuffman = Markers.isHuffman(sof.getMarker());
+		if (!isHuffman) {
+			throw new Exception("Only Huffman processes supported (not "+Markers.getAbbreviation(sof.getMarker())+" "+Markers.getDescription(sof.getMarker())+")");
+		}
+		this.isDCT = Markers.isDCT(sof.getMarker());
+		this.isLossless = Markers.isLossless(sof.getMarker());
+
+		nComponents = sos.getNComponentsPerScan();
+		DCEntropyCodingTableSelector = sos.getDCEntropyCodingTableSelector();
+		ACEntropyCodingTableSelector = sos.getACEntropyCodingTableSelector();
+		HorizontalSamplingFactor = sof.getHorizontalSamplingFactor();
+		VerticalSamplingFactor   = sof.getVerticalSamplingFactor();
+		
+		maxHorizontalSamplingFactor = max(HorizontalSamplingFactor);
+//System.err.println("maxHorizontalSamplingFactor "+maxHorizontalSamplingFactor);
+		maxVerticalSamplingFactor   = max(VerticalSamplingFactor);
+//System.err.println("maxVerticalSamplingFactor "+maxVerticalSamplingFactor);
+
+		if (isLossless && decompressing) {
+//System.err.println("SamplePrecision "+sof.getSamplePrecision());
+//System.err.println("SuccessiveApproximationBitPositionLowOrPointTransform "+sos.getSuccessiveApproximationBitPositionLowOrPointTransform());
+			predictorForFirstSample = 1 << (sof.getSamplePrecision() - sos.getSuccessiveApproximationBitPositionLowOrPointTransform() - 1);
+//System.err.println("predictorForFirstSample "+predictorForFirstSample+" dec");
+			predictorForComponent = new int[nComponents];
+			predictorSelectionValue = sos.getStartOfSpectralOrPredictorSelection();
+//System.err.println("predictorSelectionValue "+predictorSelectionValue);
+
+			rowLength = new int[nComponents];
+			currentRowNumber = new int[nComponents];
+			positionWithinRow = new int[nComponents];
+			rowNumberAtBeginningOfRestartInterval = new int[nComponents];
+			previousReconstructedRow = new int[nComponents][];
+			currentReconstructedRow = new int[nComponents][];
+			for (int c=0; c<nComponents; ++c) {
+				//rowLength[c] = sof.getNSamplesPerLine()/sof.getHorizontalSamplingFactor()[c];
+				rowLength[c] = (sof.getNSamplesPerLine()-1)/sof.getHorizontalSamplingFactor()[c]+1;		// account for sampling of row lengths not an exact multiple of sampling factor ... hmmm :(
+//System.err.println("rowLength["+c+"] "+rowLength[c]);
+				currentRowNumber[c] = 0;
+				positionWithinRow[c] = 0;
+				rowNumberAtBeginningOfRestartInterval[c] = 0;
+				previousReconstructedRow[c] = new int[rowLength[c]];
+				currentReconstructedRow[c] = new int[rowLength[c]];
+			}
+		}
+		else {
+			predictorForFirstSample = 0;	// silence uninitialized warnings
+			predictorForComponent = null;
+			predictorSelectionValue = 0;
+			rowLength = null;
+			currentRowNumber = null;
+			positionWithinRow = null;
+			rowNumberAtBeginningOfRestartInterval = null;
+			previousReconstructedRow = null;
+			currentReconstructedRow = null;
+		}
+		
+		if (dumping) dumpHuffmanTables();
+		//dumpQuantizationTables();
+	}
+
+	private final int getOneLosslessValue(int c,int dcEntropyCodingTableSelector,int colMCU,int rowMCU) throws Exception {
+		// per P&M page 492 (DIS H-2)
+		int prediction = 0;
+		if (decompressing) {
+			if (currentRowNumber[c] == rowNumberAtBeginningOfRestartInterval[c]) {		// will be true for first row since all rowNumberAtBeginningOfRestartInterval entries are initialized to zero
+				if (positionWithinRow[c] == 0)	{	// first sample of first row
+//System.err.println("Component "+c+" first sample of first row or first row after beginning of restart interval ... use predictorForFirstSample");
+					prediction = predictorForFirstSample;
+				}
+				else {
+//System.err.println("Component "+c+" other than first sample of first row or first row after beginning of restart interval ... use Ra (previous sample in row)");
+					prediction = currentReconstructedRow[c][positionWithinRow[c]-1];	// Ra
+				}
+			}
+			else if (positionWithinRow[c] == 0) {						// first sample of subsequent rows
+//System.err.println("Component "+c+" first sample of subsequent rows");
+				prediction = previousReconstructedRow[c][0];			// Rb for position 0
+			}
+			else {
+				switch(predictorSelectionValue) {
+					case 1:	prediction = currentReconstructedRow[c][positionWithinRow[c]-1];	// Ra
+							break;
+					case 2:	prediction = previousReconstructedRow[c][positionWithinRow[c]];		// Rb
+							break;
+					case 3:	prediction = previousReconstructedRow[c][positionWithinRow[c]-1];	// Rc
+							break;
+					case 4:	prediction = currentReconstructedRow[c][positionWithinRow[c]-1] + previousReconstructedRow[c][positionWithinRow[c]] - previousReconstructedRow[c][positionWithinRow[c]-1];		// Ra + Rb - Rc
+							break;
+					case 5:	prediction = currentReconstructedRow[c][positionWithinRow[c]-1] + ((previousReconstructedRow[c][positionWithinRow[c]] - previousReconstructedRow[c][positionWithinRow[c]-1])>>1);	// Ra + (Rb - Rc)/2
+							break;
+					case 6:	prediction = previousReconstructedRow[c][positionWithinRow[c]] + ((currentReconstructedRow[c][positionWithinRow[c]-1] - previousReconstructedRow[c][positionWithinRow[c]-1])>>1);	// Rb + (Ra - Rc)/2
+							break;
+					case 7: prediction = (currentReconstructedRow[c][positionWithinRow[c]-1] + previousReconstructedRow[c][positionWithinRow[c]])>>1;	// (Ra+Rb)/2
+							break;
+					default:
+						throw new Exception("Unrecognized predictor selection value "+predictorSelectionValue);
+				}
+			}
+//System.err.println("prediction ["+currentRowNumber[c]+","+positionWithinRow[c]+"] = "+prediction+" dec (0x"+Integer.toHexString(prediction)+")");
+		}
+			
+		usingTable = htByClassAndIdentifer.get("0+"+Integer.toString(dcEntropyCodingTableSelector));
+
+		final int ssss = decode();	// number of DC bits encoded next
+		// see P&M Table 11-1 page 190
+		int dcValue = 0;
+		if (ssss == 0) {
+			dcValue = 0;
+		}
+		else if (ssss == 16) {	// only occurs for lossless
+			dcValue = 32768;
+		}
+		else {
+			final int dcBits = getValueOfRequestedLength(ssss);
+			dcValue = convertSignAndAmplitudeBitsToValue(dcBits,ssss);
+		}
+//System.err.println("encoded difference value ["+currentRowNumber[c]+","+positionWithinRow[c]+"] = "+dcValue+" dec (0x"+Integer.toHexString(dcValue)+")");
+		
+		int reconstructedValue = 0;
+		
+		if (decompressing) {
+			reconstructedValue = (dcValue + prediction) & 0x0000ffff;
+		
+//System.err.println("reconstructedValue value ["+currentRowNumber[c]+","+positionWithinRow[c]+"] = "+reconstructedValue+" dec (0x"+Integer.toHexString(reconstructedValue)+")");
+		
+			currentReconstructedRow[c][positionWithinRow[c]] = reconstructedValue;
+		
+			++positionWithinRow[c];
+			if (positionWithinRow[c] >= rowLength[c]) {
+//System.err.println("Component "+c+" starting next row");
+				positionWithinRow[c] = 0;
+				++currentRowNumber[c];
+				int[] holdRow = previousReconstructedRow[c];
+				previousReconstructedRow[c] = currentReconstructedRow[c];
+				currentReconstructedRow[c] = holdRow;	// values do not matter, will be overwritten, saves deallocating and reallocating
+			}
+		}
+		
+		return reconstructedValue;	// meaningless unless decompressing, but still need to have absorbed bits from input to stay in sync
+	}
+	
+	// A "data unit" is the "smallest logical unit that can be processed", which in the case of DCT-based processes is one 8x8 block of coefficients (P&M page 101)
+	// returns updated accumulatedDCDifferenceDuringRedaction
+	private final int getOneDCTDataUnit(int dcEntropyCodingTableSelector,int acEntropyCodingTableSelector,boolean redact,int accumulatedDCDifferenceDuringRedaction) throws Exception {
+		usingTable = htByClassAndIdentifer.get("0+"+Integer.toString(dcEntropyCodingTableSelector));
+		{
+			final boolean wasCopying = copying;
+			copying = false;
+			{
+				final int ssss = decode();	// number of DC bits encoded next
+				// see P&M Table 11-1 page 190
+				int dcDIFF = 0;
+				int dcBits = 0;		// only need up here for later comparison with new computed values
+				if (ssss == 0) {
+					dcDIFF = 0;
+				}
+				else if (ssss == 16) {	// only occurs for lossless
+					dcDIFF = 32768;
+				}
+				else {
+					dcBits = getValueOfRequestedLength(ssss);
+					dcDIFF = convertSignAndAmplitudeBitsToValue(dcBits,ssss);
+				}
+//System.err.println("Got encoded DC DIFF "+dcDIFF+" dec (0x"+Integer.toHexString(dcDIFF)+")");
+//System.err.println("accumulatedDCDifferenceDuringRedaction was "+accumulatedDCDifferenceDuringRedaction+" dec (0x"+Integer.toHexString(accumulatedDCDifferenceDuringRedaction)+")");
+				if (redact) {
+System.err.println("Redacting this DCTDataUnit - to accumulatedDCDifferenceDuringRedaction "+accumulatedDCDifferenceDuringRedaction+" so far, adding this redacted dcDIFF "+dcDIFF);
+					accumulatedDCDifferenceDuringRedaction += dcDIFF;	// track it
+					dcDIFF = 0;
+				}
+				else {
+System.err.println("Not redacting this DCTDataUnit - applying accumulatedDCDifferenceDuringRedaction "+accumulatedDCDifferenceDuringRedaction+" to this non-redacted dcDIFF "+dcDIFF);
+					dcDIFF += accumulatedDCDifferenceDuringRedaction;	// apply it
+					accumulatedDCDifferenceDuringRedaction = 0;
+				}
+//System.err.println("accumulatedDCDifferenceDuringRedaction now "+accumulatedDCDifferenceDuringRedaction+" dec (0x"+Integer.toHexString(accumulatedDCDifferenceDuringRedaction)+")");
+//System.err.println("Writing DC DIFF "+dcDIFF+" dec (0x"+Integer.toHexString(dcDIFF)+")");
+
+				{
+					final int newSSSS = getNumberOfSignBits(dcDIFF);
+					final int newDCBits = getBits(dcDIFF,newSSSS);
+					//if (newSSSS != ssss || newDCBits != dcBits) {
+System.err.println("For DC value "+dcDIFF+" dec (0x"+Integer.toHexString(dcDIFF)+") SSSS was "+ssss+" is "+newSSSS+", DCBits was "+dcBits+" dec (0x"+Integer.toHexString(dcBits)+") is "+newDCBits+" dec (0x"+Integer.toHexString(newDCBits)+")");
+					//}
+					if (wasCopying) {
+						encode(ssss);
+						if (ssss > 0 && ssss < 16) {
+							writeBits(newDCBits,ssss);
+						}
+					}
+				}
+			}
+			copying = wasCopying;
+		}
+	
+		usingTable = htByClassAndIdentifer.get("1+"+Integer.toString(acEntropyCodingTableSelector));
+		{
+			final boolean wasCopying = copying;
+			if (redact && copying) {
+				copying = false;
+				writeEntropyCodedAllZeroACCoefficients();
+			}
+			
+			int i=1;
+			while (i<64) {
+				//System.err.println("AC ["+i+"]:");
+				final int rrrrssss = decode();
+				if (rrrrssss == 0) {
+					//System.err.println("AC ["+i+"]: "+"EOB");
+					break; // EOB
+				}
+				else if (rrrrssss == 0xF0) {
+					//System.err.println("AC ["+i+"]: "+"ZRL: 16 zeroes");
+					i+=16;
+				}
+				else {
+					// note that ssss of zero is not used for AC (unlike DC) in sequential mode
+					final int rrrr = rrrrssss >>> 4;
+					final int ssss = rrrrssss & 0x0f;
+					//System.err.println("AC ["+i+"]: rrrr="+rrrr+" ssss="+ssss);
+					final int acBits = getValueOfRequestedLength(ssss);
+					final int acValue = convertSignAndAmplitudeBitsToValue(acBits,ssss);
+					//System.err.println("AC ["+i+"]: "+rrrr+" zeroes then value "+acValue);
+					i+=rrrr;	// the number of zeroes
+					++i;		// the value we read (ssss is always non-zero, so we always read something)
+				}
+			}
+			
+			copying = wasCopying;
+		}
+		
+		return accumulatedDCDifferenceDuringRedaction;
+	}
+	
+	private final boolean redactionDecision(int colMCU,int rowMCU,int thisHorizontalSamplingFactor,int thisVerticalSamplingFactor,int maxHorizontalSamplingFactor,int maxVerticalSamplingFactor,int h,int v,Vector<Shape> redactionShapes) {
+		// only invoked for DCT so block size is always 8
+		final int vMCUSize = 8 * maxVerticalSamplingFactor;
+		final int hMCUSize = 8 * maxHorizontalSamplingFactor;
+//System.err.println("MCUSize in pixels = "+hMCUSize+" * "+vMCUSize);
+		
+		final int hMCUOffset = colMCU * hMCUSize;
+		final int vMCUOffset = rowMCU * vMCUSize;
+//System.err.println("MCUOffset in pixels = "+hMCUOffset+" * "+vMCUOffset);
+		
+		final int hBlockSize = 8 * maxHorizontalSamplingFactor/thisHorizontalSamplingFactor;
+		final int vBlockSize = 8 * maxVerticalSamplingFactor/thisVerticalSamplingFactor;
+//System.err.println("BlockSize in pixels = "+hBlockSize+" * "+vBlockSize);
+		
+		final int xBlock = hMCUOffset + h * hBlockSize;
+		final int yBlock = vMCUOffset + v * vBlockSize;
+		
+		Rectangle blockShape = new Rectangle(xBlock,yBlock,hBlockSize,vBlockSize);
+//System.err.println("blockShape "+blockShape);
+		
+		boolean redact = false;
+		if (redactionShapes != null) {
+			for (Shape redactionShape : redactionShapes) {
+				if (redactionShape.intersects(blockShape)) {
+					redact = true;
+					break;
+				}
+			}
+		}
+		return redact;
+	}
+	
+	private final void writeDecompressedPixel(int c,int decompressedPixel) throws IOException {
+		if (sof.getSamplePrecision() <= 8) {
+			decompressedOutputPerComponent[c].writeByte(decompressedPixel);
+		}
+		else {
+			// endianness handled by OutputArrayOrStream
+			decompressedOutputPerComponent[c].writeShort(decompressedPixel);
+		}
+	}
+	
+	private final void getOneMinimumCodedUnit(int nComponents,int[] DCEntropyCodingTableSelector,int[] ACEntropyCodingTableSelector,int[] HorizontalSamplingFactor,int[] VerticalSamplingFactor,int maxHorizontalSamplingFactor,int maxVerticalSamplingFactor,int colMCU,int rowMCU,int[] accumulatedDCDifferenceDuringRedaction,Vector<Shape> redactionShapes) throws Exception, IOException {
+		for (int c=0; c<nComponents; ++c) {
+			// See discussion of interleaving of data units within MCUs in P&M section 7.3.5 pages 101-105; always interleaved in sequential mode
+			for (int v=0; v<VerticalSamplingFactor[c]; ++v) {
+				for (int h=0; h<HorizontalSamplingFactor[c]; ++h) {
+//System.err.println("Component "+c+" v "+v+" h "+h);
+					boolean redact = redactionDecision(colMCU,rowMCU,HorizontalSamplingFactor[c],VerticalSamplingFactor[c],maxHorizontalSamplingFactor,maxVerticalSamplingFactor,h,v,redactionShapes);
+					if (isDCT) {
+						accumulatedDCDifferenceDuringRedaction[c] = getOneDCTDataUnit(DCEntropyCodingTableSelector[c],ACEntropyCodingTableSelector[c],redact,accumulatedDCDifferenceDuringRedaction[c]);
+					}
+					else if (isLossless) {
+						int decompressedPixel = getOneLosslessValue(c,DCEntropyCodingTableSelector[c],colMCU,rowMCU);
+						if (decompressing) {
+							writeDecompressedPixel(c,decompressedPixel);
+						}
+					}
+					else {
+						throw new Exception("Only DCT or Lossless processes supported (not "+Markers.getAbbreviation(sof.getMarker())+" "+Markers.getDescription(sof.getMarker())+")");
+					}
+				}
+			}
+		}
+	}
+	
+	private static final int max(int[] a) {
+		int m = Integer.MIN_VALUE;
+		for (int i : a) {
+			if (i > m) m = i;
+		}
+		return m;
+	}
+	
+	/**
+	 * <p>Decode the supplied bytes that comprise a complete EntropyCodedSegment and redact or copy them as required.</p>
+	 *
+	 * @param	bytesToDecompress	the bytes in the EntropyCodedSegment
+	 * @param	mcuCount			the number of MCUs encoded by this EntropyCodedSegment
+	 * @param	mcuOffset			the number of MCUs that have previously been read for the frame containing this EntropyCodedSegment
+	 * @return						the bytes in a copy of the EntropyCodedSegment appropriately redacted
+	 * @throws Exception			if bad things happen parsing the EntropyCodedSegment, like running out of bits, caused by malformed input
+	 * @throws IOException		if bad things happen reading or writing the bytes
+	 */
+	public final byte[] finish(byte[] bytesToDecompress,int mcuCount,int mcuOffset) throws Exception, IOException {
+//System.err.println("****** EntropyCodedSeqment.finish()");
+		this.bytesToDecompress = bytesToDecompress;
+		availableBytes = this.bytesToDecompress.length;
+		byteIndex = 0;
+		bitIndex = 8;	// force fetching byte the first time
+		haveBits = 0;	// don't have any bits to start with
+		
+		if (copying) {
+			initializeWriteBits();		// will create a new ByteArrayOutputStream
+		}
+
+		if (rowNumberAtBeginningOfRestartInterval != null) {	// do not need to do this unless decompressing lossless
+			for (int c=0; c<nComponents; ++c) {
+//System.err.println("Setting rowNumberAtBeginningOfRestartInterval["+c+"] to "+currentRowNumber[c]);
+				rowNumberAtBeginningOfRestartInterval[c] = currentRowNumber[c];	// for lossless decompression predictor selection
+			}
+		}
+		
+		int[] accumulatedDCDifferenceDuringRedaction = new int[nComponents];
+		for (int c=0; c<nComponents; ++c) {
+			accumulatedDCDifferenceDuringRedaction[c] = 0;	// P&M p171 "At the beginning of the scan and ... each restart interval, PRED is initialized to 0 (is actually a neutral gray)"
+		}
+		//try {
+		
+		for (int mcu=0; mcu<mcuCount; ++mcu) {
+			int rowMCU = mcuOffset / nMCUHorizontally;
+			int colMCU = mcuOffset % nMCUHorizontally;
+//System.err.println("MCU ("+rowMCU+","+colMCU+")");
+			getOneMinimumCodedUnit(nComponents,DCEntropyCodingTableSelector,ACEntropyCodingTableSelector,HorizontalSamplingFactor,VerticalSamplingFactor,maxHorizontalSamplingFactor,maxVerticalSamplingFactor,colMCU,rowMCU,accumulatedDCDifferenceDuringRedaction,redactionShapes);
+			++mcuOffset;
+		}
+
+//System.err.println("Finished ...");
+//System.err.println("availableBytes = "+availableBytes);
+//System.err.println("byteIndex = "+byteIndex);
+//System.err.println("bitIndex = "+bitIndex);
+//System.err.println("currentByte = "+currentByte);
+//System.err.println("currentBits = "+currentBits);
+//System.err.println("haveBits = "+haveBits);
+		
+		//}
+		//catch (Exception e) {
+		//	e.printStackTrace(System.err);
+		//}
+
+		if (copying) {
+			flushWriteBits();		// will pad appropriately to byte boundary
+		}
+		
+		return copying ? copiedBytes.toByteArray() : null;
+	}
+		
+	private final void dumpHuffmanTables() {
+		System.err.print("\n");
+		for (HuffmanTable ht : htByClassAndIdentifer.values()) {
+			System.err.print(ht.toString());
+		}
+	}
+	
+	private final void dumpQuantizationTables() {
+		System.err.print("\n");
+		for (QuantizationTable qt : qtByIdentifer.values()) {
+			System.err.print(qt.toString());
+		}
+	}
+	
+}
+


=====================================
com/pixelmed/codec/jpeg/EntropyCodedSegment.java
=====================================
@@ -22,7 +22,7 @@ import java.util.Vector;
  */
 public class EntropyCodedSegment {
 
-	private static final String identString = "@(#) $Header: /userland/cvs/codec/com/pixelmed/codec/jpeg/EntropyCodedSegment.java,v 1.24 2016/01/16 13:30:09 dclunie Exp $";
+	private static final String identString = "@(#) $Header: /userland/cvs/codec/com/pixelmed/codec/jpeg/EntropyCodedSegment.java,v 1.25 2020/03/28 21:05:39 dclunie Exp $";
 
 	private boolean copying;
 	private boolean decompressing;
@@ -178,7 +178,7 @@ public class EntropyCodedSegment {
 //System.err.println("CODE "+Integer.toBinaryString(CODE));
 //System.err.println("compare to MAXCODE[I] "+(I<MAXCODE.length ? Integer.toBinaryString(MAXCODE[I]) : "out of MAXCODE entries"));
 		}
-//System.err.println("Found CODE "+Integer.toBinaryString(CODE));
+//System.err.println("Decoded CODE "+Integer.toBinaryString(CODE)+" of length "+I);
 		int VALUE = 0;
 		if (I<MAXCODE.length) {
 			int J = VALPTR[I];
@@ -200,6 +200,16 @@ public class EntropyCodedSegment {
 		haveBits=0;
 		return VALUE;
 	}
+
+	private final void encode(int VALUE) {
+//System.err.println("Given VALUE "+VALUE+" dec (0x"+Integer.toHexString(VALUE)+")");
+		final int[] EFUFCO = usingTable.getEFUFCO();
+		final int[] EFUFSI = usingTable.getEFUFSI();
+		int CODE = EFUFCO[VALUE];
+		int size = EFUFSI[VALUE];
+//System.err.println("Encoding CODE "+Integer.toBinaryString(CODE)+" of length "+size);
+		writeBits(CODE,size);
+	}
 	
 	private final int getValueOfRequestedLength(int wantBits) throws Exception {
 		getEnoughBits(wantBits);	// modifies currentBits
@@ -227,6 +237,31 @@ public class EntropyCodedSegment {
 		return value;
 	}
 	
+	private final int getNumberOfSignBits(int value) {
+		int ssss = 0;
+		if (value < 0) {
+			value = - value;
+		}
+		while (value > 0) {
+			++ssss;
+			value = value >> 1;
+		}
+		return ssss;
+	}
+	
+	private final int getBits(int value,int ssss) {
+		int bits = 0;
+		if (ssss > 0) {
+			if (value < 0) {	// "if ... -ve, subtract 1 ... and append the SSSS low-order bits of this result" P&M p191
+				--value;
+			}
+			// else "if ... +ve, append the SSSS low-order bits" P&M p191
+			bits = value & maxAmplitude[ssss];
+		}
+		return bits;
+	}
+
+	
 	private final void writeEntropyCodedAllZeroACCoefficients() {
 		// write a single EOB code, which is rrrrssss = 0x00;
 		writeBits(usingTable.getEOBCode(),usingTable.getEOBCodeLength());
@@ -402,62 +437,119 @@ public class EntropyCodedSegment {
 	}
 	
 	// A "data unit" is the "smallest logical unit that can be processed", which in the case of DCT-based processes is one 8x8 block of coefficients (P&M page 101)
-	private final void getOneDCTDataUnit(int dcEntropyCodingTableSelector,int acEntropyCodingTableSelector,boolean redact) throws Exception {
+	// updates originalDCValue[c]
+	private void getOneDCTDataUnit(int dcEntropyCodingTableSelector,int acEntropyCodingTableSelector,boolean redact,boolean firstRedaction,boolean wasRedacting,int c,int[] originalDCValue) throws Exception {
 		usingTable = htByClassAndIdentifer.get("0+"+Integer.toString(dcEntropyCodingTableSelector));
 		{
-			final int ssss = decode();	// number of DC bits encoded next
-			// see P&M Table 11-1 page 190
-			int dcValue = 0;
-			if (ssss == 0) {
-				dcValue = 0;
-			}
-			else if (ssss == 16) {	// only occurs for lossless
-				dcValue = 32768;
-			}
-			else {
-				final int dcBits = getValueOfRequestedLength(ssss);
-				dcValue = convertSignAndAmplitudeBitsToValue(dcBits,ssss);
+			final boolean wasCopying = copying;
+			copying = false;
+			{
+				final int ssss = decode();	// number of DC bits encoded next
+				// see P&M Table 11-1 page 190
+				int dcDIFF = 0;
+				int dcBits = 0;		// only need up here for later comparison with new computed values
+				if (ssss == 0) {
+					dcDIFF = 0;
+				}
+				else if (ssss == 16) {	// only occurs for lossless
+					dcDIFF = 32768;
+				}
+				else {
+					dcBits = getValueOfRequestedLength(ssss);
+					dcDIFF = convertSignAndAmplitudeBitsToValue(dcBits,ssss);
+				}
+//System.err.println("Got encoded DC DIFF "+dcDIFF+" dec (0x"+Integer.toHexString(dcDIFF)+")");
+//System.err.println("originalDCValue was "+originalDCValue[c]+" dec (0x"+Integer.toHexString(originalDCValue[c])+")");
+				{
+					int newDCDIFF;
+					if (redact) {
+						if (firstRedaction) {
+//System.err.println("Redacting - first redaction");
+							newDCDIFF = - originalDCValue[c];	// what we need to make what we had (before the current block) neutral gray
+							originalDCValue[c] += dcDIFF;
+						}
+						else {
+//System.err.println("Redacting - not first redaction");
+							newDCDIFF = 0;
+							originalDCValue[c] += dcDIFF;
+						}
+					}
+					else {
+						if (wasRedacting) {
+//System.err.println("Not redacting - but was last time, so use diff to restore original value plus specified diff for this block");
+							originalDCValue[c] += dcDIFF;
+							newDCDIFF = originalDCValue[c]; // the difference between the neutral gray we have and what we want (which already includes the current block's encoded dcDIFF)
+						}
+						else {
+//System.err.println("Not redacting - and wasn't last time");
+							originalDCValue[c] += dcDIFF;
+							// no redaction activity - rewrite what we decoded
+							newDCDIFF = dcDIFF;
+						}
+					}
+//System.err.println("originalDCValue now "+originalDCValue[c]+" dec (0x"+Integer.toHexString(originalDCValue[c])+")");
+
+//System.err.println("Writing new DC DIFF "+newDCDIFF+" dec (0x"+Integer.toHexString(newDCDIFF)+")");
+					final int newSSSS = getNumberOfSignBits(newDCDIFF);
+					final int newDCBits = getBits(newDCDIFF,newSSSS);
+//					if (convertSignAndAmplitudeBitsToValue(newDCBits,newSSSS) != newDCDIFF) {
+//System.err.println("Encoding of DC DIFF failed round-trip test");
+//					}
+					
+					if (newSSSS != ssss || newDCBits != dcBits) {
+//System.err.println("For DC value was "+dcDIFF+" dec (0x"+Integer.toHexString(dcDIFF)+") now "+newDCDIFF+" dec (0x"+Integer.toHexString(newDCDIFF)+") SSSS was "+ssss+" is "+newSSSS+", DCBits was "+dcBits+" dec (0x"+Integer.toHexString(dcBits)+") is "+newDCBits+" dec (0x"+Integer.toHexString(newDCBits)+")");
+					}
+					if (wasCopying) {
+						encode(newSSSS);
+						if (newSSSS > 0 && newSSSS < 16) {
+							writeBits(newDCBits,newSSSS);
+						}
+					}
+				}
 			}
-//System.err.println("Got DC value "+dcValue+" dec (0x"+Integer.toHexString(dcValue)+")");
+			copying = wasCopying;
 		}
-		
+	
 		usingTable = htByClassAndIdentifer.get("1+"+Integer.toString(acEntropyCodingTableSelector));
-		
-		final boolean restoreCopying = copying;
-		if (redact && copying) {
-			copying = false;
-			writeEntropyCodedAllZeroACCoefficients();
-		}
-		
-		int i=1;
-		while (i<64) {
-//System.err.println("AC ["+i+"]:");
-			final int rrrrssss = decode();
-			if (rrrrssss == 0) {
-//System.err.println("AC ["+i+"]: "+"EOB");
-				break; // EOB
-			}
-			else if (rrrrssss == 0xF0) {
-//System.err.println("AC ["+i+"]: "+"ZRL: 16 zeroes");
-				i+=16;
+		{
+			final boolean wasCopying = copying;
+			if (redact && copying) {
+				copying = false;
+				writeEntropyCodedAllZeroACCoefficients();
 			}
-			else {
-				// note that ssss of zero is not used for AC (unlike DC) in sequential mode
-				final int rrrr = rrrrssss >>> 4;
-				final int ssss = rrrrssss & 0x0f;
-//System.err.println("AC ["+i+"]: rrrr="+rrrr+" ssss="+ssss);
-				final int acBits = getValueOfRequestedLength(ssss);
-				final int acValue = convertSignAndAmplitudeBitsToValue(acBits,ssss);
-//System.err.println("AC ["+i+"]: "+rrrr+" zeroes then value "+acValue);
-				i+=rrrr;	// the number of zeroes
-				++i;		// the value we read (ssss is always non-zero, so we always read something
+			
+			int i=1;
+			while (i<64) {
+				//System.err.println("AC ["+i+"]:");
+				final int rrrrssss = decode();
+				if (rrrrssss == 0) {
+					//System.err.println("AC ["+i+"]: "+"EOB");
+					break; // EOB
+				}
+				else if (rrrrssss == 0xF0) {
+					//System.err.println("AC ["+i+"]: "+"ZRL: 16 zeroes");
+					i+=16;
+				}
+				else {
+					// note that ssss of zero is not used for AC (unlike DC) in sequential mode
+					final int rrrr = rrrrssss >>> 4;
+					final int ssss = rrrrssss & 0x0f;
+					//System.err.println("AC ["+i+"]: rrrr="+rrrr+" ssss="+ssss);
+					final int acBits = getValueOfRequestedLength(ssss);
+					final int acValue = convertSignAndAmplitudeBitsToValue(acBits,ssss);
+					//System.err.println("AC ["+i+"]: "+rrrr+" zeroes then value "+acValue);
+					i+=rrrr;	// the number of zeroes
+					++i;		// the value we read (ssss is always non-zero, so we always read something
+				}
 			}
+			
+			copying = wasCopying;
 		}
-		
-		copying = restoreCopying;
 	}
 	
 	private final boolean redactionDecision(int colMCU,int rowMCU,int thisHorizontalSamplingFactor,int thisVerticalSamplingFactor,int maxHorizontalSamplingFactor,int maxVerticalSamplingFactor,int h,int v,Vector<Shape> redactionShapes) {
+		boolean redactJustBlockNotEntireMCU = true;
+		
 		// only invoked for DCT so block size is always 8
 		final int vMCUSize = 8 * maxVerticalSamplingFactor;
 		final int hMCUSize = 8 * maxHorizontalSamplingFactor;
@@ -466,15 +558,21 @@ public class EntropyCodedSegment {
 		final int hMCUOffset = colMCU * hMCUSize;
 		final int vMCUOffset = rowMCU * vMCUSize;
 //System.err.println("MCUOffset in pixels = "+hMCUOffset+" * "+vMCUOffset);
-		
-		final int hBlockSize = 8 * maxHorizontalSamplingFactor/thisHorizontalSamplingFactor;
-		final int vBlockSize = 8 * maxVerticalSamplingFactor/thisVerticalSamplingFactor;
+
+		Rectangle blockShape = null;
+		if (redactJustBlockNotEntireMCU) {
+			final int hBlockSize = 8 * maxHorizontalSamplingFactor/thisHorizontalSamplingFactor;
+			final int vBlockSize = 8 * maxVerticalSamplingFactor/thisVerticalSamplingFactor;
 //System.err.println("BlockSize in pixels = "+hBlockSize+" * "+vBlockSize);
 		
-		final int xBlock = hMCUOffset + h * hBlockSize;
-		final int yBlock = vMCUOffset + v * vBlockSize;
+			final int xBlock = hMCUOffset + h * hBlockSize;
+			final int yBlock = vMCUOffset + v * vBlockSize;
 		
-		Rectangle blockShape = new Rectangle(xBlock,yBlock,hBlockSize,vBlockSize);
+			blockShape = new Rectangle(xBlock,yBlock,hBlockSize,vBlockSize);
+		}
+		else {
+			blockShape = new Rectangle(hMCUOffset,vMCUOffset,hMCUSize,vMCUSize);
+		}
 //System.err.println("blockShape "+blockShape);
 		
 		boolean redact = false;
@@ -499,15 +597,22 @@ public class EntropyCodedSegment {
 		}
 	}
 	
-	private final void getOneMinimumCodedUnit(int nComponents,int[] DCEntropyCodingTableSelector,int[] ACEntropyCodingTableSelector,int[] HorizontalSamplingFactor,int[] VerticalSamplingFactor,int maxHorizontalSamplingFactor,int maxVerticalSamplingFactor,int colMCU,int rowMCU,Vector<Shape> redactionShapes) throws Exception, IOException {
+	private final void getOneMinimumCodedUnit(int nComponents,int[] DCEntropyCodingTableSelector,int[] ACEntropyCodingTableSelector,int[] HorizontalSamplingFactor,int[] VerticalSamplingFactor,int maxHorizontalSamplingFactor,int maxVerticalSamplingFactor,int colMCU,int rowMCU,int[] originalDCValue,Boolean[] firstRedaction,Boolean[] wasRedacting,Vector<Shape> redactionShapes) throws Exception, IOException {
 		for (int c=0; c<nComponents; ++c) {
 			// See discussion of interleaving of data units within MCUs in P&M section 7.3.5 pages 101-105; always interleaved in sequential mode
 			for (int v=0; v<VerticalSamplingFactor[c]; ++v) {
 				for (int h=0; h<HorizontalSamplingFactor[c]; ++h) {
 //System.err.println("Component "+c+" v "+v+" h "+h);
 					boolean redact = redactionDecision(colMCU,rowMCU,HorizontalSamplingFactor[c],VerticalSamplingFactor[c],maxHorizontalSamplingFactor,maxVerticalSamplingFactor,h,v,redactionShapes);
+					firstRedaction[c] = false;	// whether redacting this block or not
+					if (redact) {
+						if (!wasRedacting[c]) {
+							firstRedaction[c] = true;
+						}
+					}
 					if (isDCT) {
-						getOneDCTDataUnit(DCEntropyCodingTableSelector[c],ACEntropyCodingTableSelector[c],redact);
+						// P&M p103 "PRED is always the preceding DC value coded for the same component"
+						getOneDCTDataUnit(DCEntropyCodingTableSelector[c],ACEntropyCodingTableSelector[c],redact,firstRedaction[c],wasRedacting[c],c,originalDCValue);
 					}
 					else if (isLossless) {
 						int decompressedPixel = getOneLosslessValue(c,DCEntropyCodingTableSelector[c],colMCU,rowMCU);
@@ -518,6 +623,7 @@ public class EntropyCodedSegment {
 					else {
 						throw new Exception("Only DCT or Lossless processes supported (not "+Markers.getAbbreviation(sof.getMarker())+" "+Markers.getDescription(sof.getMarker())+")");
 					}
+					wasRedacting[c] = redact;
 				}
 			}
 		}
@@ -542,6 +648,7 @@ public class EntropyCodedSegment {
 	 * @throws IOException		if bad things happen reading or writing the bytes
 	 */
 	public final byte[] finish(byte[] bytesToDecompress,int mcuCount,int mcuOffset) throws Exception, IOException {
+//System.err.println("****** EntropyCodedSeqment.finish()");
 		this.bytesToDecompress = bytesToDecompress;
 		availableBytes = this.bytesToDecompress.length;
 		byteIndex = 0;
@@ -552,19 +659,28 @@ public class EntropyCodedSegment {
 			initializeWriteBits();		// will create a new ByteArrayOutputStream
 		}
 
-		if (rowNumberAtBeginningOfRestartInterval != null) {	// do not need to do this unless decompressiong lossless
+		if (rowNumberAtBeginningOfRestartInterval != null) {	// do not need to do this unless decompressing lossless
 			for (int c=0; c<nComponents; ++c) {
 //System.err.println("Setting rowNumberAtBeginningOfRestartInterval["+c+"] to "+currentRowNumber[c]);
 				rowNumberAtBeginningOfRestartInterval[c] = currentRowNumber[c];	// for lossless decompression predictor selection
 			}
 		}
+		
+		int[] originalDCValue = new int[nComponents];
+		Boolean[] firstRedaction = new Boolean[nComponents];
+		Boolean[] wasRedacting = new Boolean[nComponents];
+		for (int c=0; c<nComponents; ++c) {
+			originalDCValue[c] = 0;		// P&M p171 "At the beginning of the scan and ... each restart interval, PRED is initialized to 0 (is actually a neutral gray)"
+			firstRedaction[c] = false;
+			wasRedacting[c] = false;
+		}
 		//try {
 		
 		for (int mcu=0; mcu<mcuCount; ++mcu) {
 			int rowMCU = mcuOffset / nMCUHorizontally;
 			int colMCU = mcuOffset % nMCUHorizontally;
 //System.err.println("MCU ("+rowMCU+","+colMCU+")");
-			getOneMinimumCodedUnit(nComponents,DCEntropyCodingTableSelector,ACEntropyCodingTableSelector,HorizontalSamplingFactor,VerticalSamplingFactor,maxHorizontalSamplingFactor,maxVerticalSamplingFactor,colMCU,rowMCU,redactionShapes);
+			getOneMinimumCodedUnit(nComponents,DCEntropyCodingTableSelector,ACEntropyCodingTableSelector,HorizontalSamplingFactor,VerticalSamplingFactor,maxHorizontalSamplingFactor,maxVerticalSamplingFactor,colMCU,rowMCU,originalDCValue,firstRedaction,wasRedacting,redactionShapes);
 			++mcuOffset;
 		}
 


=====================================
com/pixelmed/codec/jpeg/HuffmanTable.java
=====================================
@@ -9,7 +9,7 @@ package com.pixelmed.codec.jpeg;
  */
 public class HuffmanTable {
 
-	private static final String identString = "@(#) $Header: /userland/cvs/codec/com/pixelmed/codec/jpeg/HuffmanTable.java,v 1.4 2014/03/23 11:41:54 dclunie Exp $";
+	private static final String identString = "@(#) $Header: /userland/cvs/codec/com/pixelmed/codec/jpeg/HuffmanTable.java,v 1.5 2020/03/28 21:05:39 dclunie Exp $";
 
 	private int TableClass;
 	private int HuffmanTableIdentifier;
@@ -48,8 +48,15 @@ public class HuffmanTable {
 	public int[] getMAXCODE() { return MAXCODE; };
 	public int[] getVALPTR()  { return VALPTR; };
 	public int[] getHUFFVAL() { return HUFFVAL; };
-	
-	// for our redaction purposes, we need to replace AC coefficients with all zeroes (EOB), so take note of this code whilst expanding tables
+	public int[] getHUFFSIZE() { return HUFFSIZE; };
+
+	private int[] EFUFCO;
+	private int[] EFUFSI;
+
+	public int[] getEFUFCO() { return EFUFCO; };
+	public int[] getEFUFSI() { return EFUFSI; };
+
+	// for our redaction purposes, we need to replace AC coefficients with all zeroes (EOB), so take note of what this code is whilst expanding tables
 	private int EOBCode;
 	private int EOBCodeLength;
 	
@@ -67,15 +74,20 @@ public class HuffmanTable {
 		}
 		
 		int nCodes = countNumberOfCodes();
-		// HUFFVAL is a flat list of codes in the order read they are encoded in the DHT segment, which is already sorted into ascending orded
+		int largestValue = 0;
+		// HUFFVAL is a flat list of codes in the order read they are encoded in the DHT segment, which is already sorted into ascending order
 		{
-			HUFFVAL = new int[nCodes+1];
+			HUFFVAL = new int[nCodes];
+
 			int J = 0;	// N.B. This is one of the few tables in ISO 10918-1 that starts with an index of zero, not one; must match VALPTR values used as indices into HUFFVAL
 			for (int i=0; i<nHuffmanCodesOfLengthI.length; ++i) {
 				int nCodesThisLength = nHuffmanCodesOfLengthI[i];
 				if (nCodesThisLength > 0) {
 					for (int j=0; j<nCodesThisLength; ++j) {
 						HUFFVAL[J] = ValueOfHuffmanCodeIJ[i][j];
+						if (HUFFVAL[J] > largestValue) {
+							largestValue = HUFFVAL[J];
+						}
 						++J;
 					}
 				}
@@ -129,6 +141,20 @@ public class HuffmanTable {
 			
 			}
 		}
+
+		// 10918-1 C.2 Figure C.3 Order_codes (for encoding)
+		// EFUFCO contains a code for each value in HUFFVAL
+		// EFUFSI contains a size for each value in HUFFVAL
+		EFUFCO = new int[largestValue+1];
+		EFUFSI = new int[largestValue+1];
+		{
+			for (int K=0; K<HUFFVAL.length; ++K) {
+				int I = HUFFVAL[K];
+				EFUFCO[I] = HUFFCODE[K];
+				EFUFSI[I] = HUFFSIZE[K];
+//System.err.println("HUFFVAL[K] "+HUFFVAL[K]+" EFUFCO[I] "+Integer.toBinaryString(EFUFCO[I])+" EFUFSI[I] "+Integer.toBinaryString(EFUFSI[I]));
+			}
+		}
 		
 		// 10918-1 C.2 Figure F.15 Decoder_tables generation
 		{


=====================================
com/pixelmed/codec/jpeg/Makefile
=====================================
@@ -25,17 +25,19 @@ testparse:	${OBJS}
 	rm -f /tmp/crap_copied.jpg
 	rm -f /tmp/crap_source.jpg
 	cp -v \
-		"/Volumes/Toshiba5TEnc/MDDX/20170320_Assembla2719_MissingJPEGEOI/corruptedfile147652_IM001_35.jpg" \
+		"$${HOME}/Documents/Clients/MDDX/Experiment20130905/crap.jpg" \
 		/tmp/crap_source.jpg
 	java -Djava.awt.headless=true  -cp ${PATHTOROOT} com.pixelmed.codec.jpeg.Parse \
 		/tmp/crap_source.jpg \
 		/tmp/crap_copied.jpg
 	# use make -i to continue to dump
+	ls -l /tmp/crap_source.jpg
+	ls -l /tmp/crap_copied.jpg
+	hexdump -C /tmp/crap_source.jpg | tail -3
+	hexdump -C /tmp/crap_copied.jpg | tail -3
 	@echo "Comparing source and copied ... may fail with EOF if padding after EOI marker that is not copied, or missing EOI marker is added, both of which are OK"
 	cmp /tmp/crap_source.jpg /tmp/crap_copied.jpg
 	@echo "Finished comparing"
-	hexdump -C /tmp/crap_source.jpg | tail -3
-	hexdump -C /tmp/crap_copied.jpg | tail -3
 
 # without restart and working
 		#"$${HOME}/Documents/Medical/compression/JPEG/10918-1/jpeg-6/testimg.jpg"
@@ -76,6 +78,17 @@ testdecompress:	${OBJS}
 		#"$${HOME}/Documents/Medical/compression/JPEG/10918-2/ITU T83/T83_process14/O1.JPG" \
 		#"$${HOME}/Documents/Medical/compression/JPEG/10918-2/ITU T83/T83_process14/O2.JPG" \
 
+testcannotdecompress:	${OBJS}
+	rm -f /tmp/crap_source.jpg
+	rm -f /tmp/crap_decompressed*.raw
+	cp -v \
+		"${PATHTOROOT}/${PATHTOTESTFILESFROMROOT}/smpte_8_q1.jpg" \
+		/tmp/crap_source.jpg
+	java -Djava.awt.headless=true  -cp ${PATHTOROOT} com.pixelmed.codec.jpeg.Parse \
+		/tmp/crap_source.jpg \
+		"" \
+		/tmp/crap_decompressed.raw
+
 testdecompressfromdicom:	${OBJS}
 	rm -f /tmp/crap_source.dcm
 	rm -f /tmp/crap_source.jpg


=====================================
com/pixelmed/codec/jpeg/Markers.java
=====================================
@@ -12,7 +12,7 @@ import java.util.Map;
  */
 public class Markers {
 
-	private static final String identString = "@(#) $Header: /userland/cvs/codec/com/pixelmed/codec/jpeg/Markers.java,v 1.4 2016/01/16 15:07:52 dclunie Exp $";
+	private static final String identString = "@(#) $Header: /userland/cvs/codec/com/pixelmed/codec/jpeg/Markers.java,v 1.5 2018/08/30 10:23:04 dclunie Exp $";
 	
 	// modified from dicom3tools appsrc/misc/jpegdump.cc ...
 
@@ -56,23 +56,23 @@ public class Markers {
 	public static final int RST5 = 0xffd5;
 	public static final int RST6 = 0xffd6;
 	public static final int RST7 = 0xffd7;
-
-	public static final int SOF0 = 0xffc0;
-	public static final int SOF1 = 0xffc1;
-	public static final int SOF2 = 0xffc2;
-	public static final int SOF3 = 0xffc3;
-
-	public static final int SOF5 = 0xffc5;
-	public static final int SOF6 = 0xffc6;
-	public static final int SOF7 = 0xffc7;
-
-	public static final int SOF9 = 0xffc9;
-	public static final int SOFA = 0xffca;
-	public static final int SOFB = 0xffcb;
-
-	public static final int SOFD = 0xffcd;
-	public static final int SOFE = 0xffce;
-	public static final int SOFF = 0xffcf;
+	
+	public static final int SOF0 = 0xffc0;	// Huffman Baseline DCT
+	public static final int SOF1 = 0xffc1;	// Huffman Extended Sequential DCT
+	public static final int SOF2 = 0xffc2;	// Huffman Progressive DCT
+	public static final int SOF3 = 0xffc3;	// Huffman Lossless Sequential
+
+	public static final int SOF5 = 0xffc5;	// Huffman Differential Sequential DCT
+	public static final int SOF6 = 0xffc6;	// Huffman Differential Progressive DCT
+	public static final int SOF7 = 0xffc7;	// Huffman Differential Lossless
+
+	public static final int SOF9 = 0xffc9;	// Arithmetic Extended Sequential DCT
+	public static final int SOFA = 0xffca;	// Arithmetic Progressive DCT
+	public static final int SOFB = 0xffcb;	// Arithmetic Lossless Sequential
+
+	public static final int SOFD = 0xffcd;	// Arithmetic Differential Sequential DCT
+	public static final int SOFE = 0xffce;	// Arithmetic Differential Progressive DCT
+	public static final int SOFF = 0xffcf;	// Arithmetic Differential Lossless
 
 	public static final int SOI = 0xffd8;
 	public static final int SOS = 0xffda;


=====================================
com/pixelmed/codec/jpeg/Parse.java
=====================================
@@ -32,7 +32,7 @@ import java.util.Vector;
  */
 public class Parse {
 
-	private static final String identString = "@(#) $Header: /userland/cvs/codec/com/pixelmed/codec/jpeg/Parse.java,v 1.18 2017/03/21 17:42:24 dclunie Exp $";
+	private static final String identString = "@(#) $Header: /userland/cvs/codec/com/pixelmed/codec/jpeg/Parse.java,v 1.19 2018/08/30 10:23:04 dclunie Exp $";
 	
 	private static int getLargestSamplingFactor(int[] factors) {
 		int largest = 0;
@@ -333,7 +333,12 @@ public class Parse {
 								sof = new MarkerSegmentSOF(marker,b,length-2);
 								if (dumping) System.err.print(sof);
 								if (copying) writeVariableLengthMarkerSegment(copiedRedactedOutputStream,marker,length,b);
-								if (decompressing) decompressedOutput.configureDecompressedOutput(sof);
+								if (decompressing) {
+									if (marker != Markers.SOF3) {
+										throw new Exception("Error - cannot decompress anything other than Huffman Lossless Sequential");
+									}
+									decompressedOutput.configureDecompressedOutput(sof);
+								}
 								break;
 							case Markers.DHT:
 								MarkerSegmentDHT dht = new MarkerSegmentDHT(b,length-2);


=====================================
com/pixelmed/codec/jpeg/package.html
=====================================
@@ -5,7 +5,7 @@
 
   @(#)package.html	1.60 98/01/27
 
-  Copyright (c) 2001-2016, David A. Clunie DBA Pixelmed Publishing. All rights reserved.
+  Copyright (c) 2001-2017, David A. Clunie DBA Pixelmed Publishing. All rights reserved.
 
 -->
 </head>
@@ -15,7 +15,8 @@
 <h2>Package Specification</h2>
 
 <p>This package contains a pure Java codec for selective block redaction of
-baseline process (8 bit, DCT, Huffman coded) JPEG images.</p>
+baseline process (8 bit, DCT, Huffman coded) JPEG images
+and decompression of lossless JPEG images.</p>
 
 <p>Development of this package was supported by funding from MDDX Research and Informatics.</p>
 


=====================================
com/pixelmed/imageio/Makefile
=====================================
@@ -30,3 +30,8 @@ testlosslessjpeg:
 	#dctoraw "$${HOME}/Pictures/Medical/JPEGLossless/eightbitrgbsingleframe.dcm" /tmp/crap.jpg	# DHT segment between SOI and SOF
 	#dctoraw "$${HOME}/Pictures/Medical/JPEGLossless/ivus_thousandsofframes_losslessjpeg.dcm" /tmp/crap.jpg	# DHT segment between SOI and SOF
 	java -cp ${PATHTOROOT}/pixelmed_imageio.jar com.pixelmed.imageio.TestImageIO /tmp/crap.jpg
+
+testnotlosslessjpeg:
+	# should fail rather than display black image
+	java -cp ${PATHTOROOT}/pixelmed_imageio.jar com.pixelmed.imageio.TestImageIO "${PATHTOROOT}/${PATHTOTESTFILESFROMROOT}/smpte_8_q1.jpg" jpeg-lossless 0
+


=====================================
debian/changelog
=====================================
@@ -1,3 +1,14 @@
+pixelmed-codec (20200328-1) unstable; urgency=medium
+
+  * Team upload.
+  * d/control: Update Homepage to direct location
+  * d/copyright: Update Source URL
+  * New upstream version 20200328
+  * d/control: Bump Std-Vers to 4.5.0; no changes needed
+  * d/rules: Remove BUILDDATE file
+
+ -- Mathieu Malaterre <malat at debian.org>  Fri, 31 Jul 2020 10:35:58 +0200
+
 pixelmed-codec (20170512-2) unstable; urgency=medium
 
   * debhelper 11


=====================================
debian/control
=====================================
@@ -8,10 +8,10 @@ Build-Depends-Indep: default-jdk,
                      javahelper,
                      junit4,
                      libhsqldb-java
-Standards-Version: 4.3.0
+Standards-Version: 4.5.0
 Vcs-Browser: https://salsa.debian.org/med-team/pixelmed-codec
 Vcs-Git: https://salsa.debian.org/med-team/pixelmed-codec.git
-Homepage: http://www.pixelmed.com
+Homepage: https://www.pixelmed.com/jpeg.html
 
 Package: libpixelmed-codec-java
 Architecture: all


=====================================
debian/copyright
=====================================
@@ -1,7 +1,7 @@
 Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
 Upstream-Name: PixelMed Java DICOM Toolkit
 Upstream-Contact: pixelmed_dicom at yahoogroups.com
-Source: http://www.dclunie.com/pixelmed/software/codec/
+Source: http://www.dclunie.com/pixelmed/software/codec/index.html
 
 Files: *
 Copyright: 2014, David A. Clunie DBA PixelMed Publishing. All rights reserved.


=====================================
debian/rules
=====================================
@@ -5,3 +5,6 @@ export JAVA_HOME=/usr/lib/jvm/default-java
 
 %:
 	dh $@ --with javahelper
+
+override_dh_clean:
+	dh_clean BUILDDATE



View it on GitLab: https://salsa.debian.org/med-team/pixelmed-codec/-/compare/d98334e066f42bfc7da1f3e7e8e1b4a855cc3205...ea1c3ab5545523d8dcc68f4938079fcda29b25ed

-- 
View it on GitLab: https://salsa.debian.org/med-team/pixelmed-codec/-/compare/d98334e066f42bfc7da1f3e7e8e1b4a855cc3205...ea1c3ab5545523d8dcc68f4938079fcda29b25ed
You're receiving this email because of your account on salsa.debian.org.


-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/debian-med-commit/attachments/20200731/9c8c36b0/attachment-0001.html>


More information about the debian-med-commit mailing list