Improved support for chunks and chunk streams, plus tests

git-svn-id: https://svn.apache.org/repos/asf/jakarta/poi/trunk@548870 13f79535-47bb-0310-9956-ffa450edef68
Nick Burch 2007-06-19 22:41:33 +00:00
parent 0df13bee23
commit 456d639e55
11 changed files with 267 additions and 10 deletions


@@ -38,6 +38,21 @@ public class Chunk {
 		this.contents = contents;
 	}
+	public byte[] _getContents() {
+		return contents;
+	}
+	public ChunkHeader getHeader() {
+		return header;
+	}
+	/** Gets the separator between this chunk and the next, if it exists */
+	public ChunkSeparator getSeparator() {
+		return separator;
+	}
+	/** Gets the trailer for this chunk, if it exists */
+	public ChunkTrailer getTrailer() {
+		return trailer;
+	}
+
 	/**
 	 * Returns the size of the chunk, including any
 	 *  headers, trailers and separators.
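
For illustration, a minimal sketch of how the new accessors might be used by code sitting in the same org.apache.poi.hdgf.chunks package (the DescribeChunk class is hypothetical; the ChunkFactory(11) constructor and createChunk(byte[], int) usage follow the new TestChunks test below):

package org.apache.poi.hdgf.chunks;

// Hypothetical helper, shown only to illustrate the new Chunk accessors
public class DescribeChunk {
	public static void describe(byte[] data, int offset) throws Exception {
		ChunkFactory cf = new ChunkFactory(11);
		Chunk chunk = cf.createChunk(data, offset);

		// The header is always present; trailer and separator may be null
		System.out.println("Length on disk: " + chunk.getOnDiskSize());
		System.out.println("Header length:  " + chunk.getHeader().getLength());
		System.out.println("Has trailer:    " + (chunk.getTrailer() != null));
		System.out.println("Has separator:  " + (chunk.getSeparator() != null));
		System.out.println("Contents bytes: " + chunk._getContents().length);
	}
}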


@@ -40,19 +40,48 @@ public class ChunkFactory {
 		// Create the header
 		ChunkHeader header =
			ChunkHeader.createChunkHeader(version, data, offset);
+		int endOfDataPos = offset + header.getLength() + header.getSizeInBytes();
+
+		// Check we have enough data, and tweak the header size
+		//  as required
+		if(endOfDataPos > data.length) {
+			System.err.println("Header called for " + header.getLength() + " bytes, but that would take us past the end of the data!");
+			endOfDataPos = data.length;
+			header.length = data.length - offset - header.getSizeInBytes();
+
+			if(header.hasTrailer()) {
+				header.length -= 8;
+				endOfDataPos -= 8;
+			}
+			if(header.hasSeparator()) {
+				header.length -= 4;
+				endOfDataPos -= 4;
+			}
+		}
 
 		// Create the trailer and separator, if required
 		ChunkTrailer trailer = null;
 		ChunkSeparator separator = null;
 		if(header.hasTrailer()) {
-			trailer = new ChunkTrailer(
-					data, header.getLength() + header.getSizeInBytes());
-			if(header.hasSeparator()) {
-				separator = new ChunkSeparator(
-						data, header.getLength() + header.getSizeInBytes() + 8);
+			if(endOfDataPos <= data.length-8) {
+				trailer = new ChunkTrailer(
+						data, endOfDataPos);
+				endOfDataPos += 8;
+			} else {
+				System.err.println("Header claims a length to " + endOfDataPos + ", but there's then no space for the trailer in the data (" + data.length + ")");
 			}
 		}
+		if(header.hasSeparator()) {
+			if(endOfDataPos <= data.length-4) {
+				separator = new ChunkSeparator(
+						data, endOfDataPos);
+			} else {
+				System.err.println("Header claims a length to " + endOfDataPos + ", but there's then no space for the separator in the data (" + data.length + ")");
+			}
+		}
 
 		// Now, create the chunk
 		byte[] contents = new byte[header.getLength()];
 		System.arraycopy(data, offset+header.getSizeInBytes(), contents, 0, contents.length);
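
The new bounds checking above matters when a header claims more data than actually remains; for illustration, a minimal sketch of walking consecutive chunks with this factory, after the pattern of the new testManyChunks() test below (walkChunks is a hypothetical name, and data is assumed to hold a complete decompressed chunk stream):

// Hypothetical walker over a decompressed chunk stream,
//  following the pattern of testManyChunks() below
private void walkChunks(ChunkFactory cf, byte[] data) throws Exception {
	int offset = 0;
	while(offset < data.length) {
		Chunk chunk = cf.createChunk(data, offset);
		// getOnDiskSize() covers the header, contents, and any
		//  trailer and separator, so it is the amount to advance by
		offset += chunk.getOnDiskSize();
	}
}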


@@ -29,7 +29,7 @@ public class ChunkHeaderV11 extends ChunkHeaderV6 {
 		if(unknown2 == 2 && unknown3 == 0x55) { return true; }
 		if(unknown2 == 2 && unknown3 == 0x54 && type == 0xaa) { return true; }
-		if(unknown2 == 3 && unknown3 == 0x50 && type == 0xaa) { return true; }
+		if(unknown2 == 3 && unknown3 != 0x50) { return true; }
 		if(type == 0x69) { return true; }
 
 		return false;


@@ -67,6 +67,16 @@ public class VSDDumper {
 		System.out.println(ind + " Compressed is\t" + ptr.destinationCompressed());
 		System.out.println(ind + " Stream is\t" + stream.getClass().getCanonicalName());
+
+		byte[] db = stream._getStore()._getContents();
+		String ds = "";
+		if(db.length >= 8) {
+			for(int i=0; i<8; i++) {
+				if(i>0) ds += ", ";
+				ds += db[i];
+			}
+		}
+		System.out.println(ind + " First few bytes are\t" + ds);
+
 		if(stream instanceof PointerContainingStream) {
 			PointerContainingStream pcs = (PointerContainingStream)stream;
 			System.out.println(ind + " Has " +


@@ -30,6 +30,9 @@ public class ChunkStream extends Stream {
 	protected ChunkStream(Pointer pointer, StreamStore store, ChunkFactory chunkFactory) {
 		super(pointer, store);
 		this.chunkFactory = chunkFactory;
+
+		// For compressed stores, we require all of the data
+		store.copyBlockHeaderToContents();
 	}
 
 	public Chunk[] getChunks() { return chunks; }


@@ -25,7 +25,7 @@ import org.apache.poi.hdgf.LZW4HDGF;
  * A StreamStore where the data on-disk is compressed,
  *  using the crazy Visio LZW
  */
-class CompressedStreamStore extends StreamStore {
+public class CompressedStreamStore extends StreamStore {
 	/** The raw, compressed contents */
 	private byte[] compressedContents;
 	/**
@@ -33,6 +33,7 @@ class CompressedStreamStore {
 	 *  real contents in the de-compressed data
 	 */
 	private byte[] blockHeader = new byte[4];
+	private boolean blockHeaderInContents = false;
 
 	protected byte[] _getCompressedContents() { return compressedContents; }
 	protected byte[] _getBlockHeader() { return blockHeader; }
@@ -54,6 +55,19 @@ class CompressedStreamStore {
 		super(decompressedData[1], 0, decompressedData[1].length);
 		blockHeader = decompressedData[0];
 	}
+
+	/**
+	 * Some kinds of streams expect their 4 byte header to be
+	 *  on the front of the contents.
+	 * They can call this to have it sorted.
+	 */
+	protected void copyBlockHeaderToContents() {
+		if(blockHeaderInContents) return;
+
+		prependContentsWith(blockHeader);
+		blockHeaderInContents = true;
+	}
 
 	/**
 	 * Decompresses the given data, returning it as header + contents


@@ -89,7 +89,7 @@ public class PointerContainingStream extends Stream {
 			// Process chunk streams into their chunks
 			if(childStreams[i] instanceof ChunkStream) {
 				ChunkStream child = (ChunkStream)childStreams[i];
-//				child.findChunks();
+				child.findChunks();
 			}
 
 			// Recurse into pointer containing streams


@@ -35,6 +35,7 @@ public abstract class Stream {
 	public Pointer getPointer() { return pointer; }
 	protected StreamStore getStore() { return store; }
+	public StreamStore _getStore() { return store; }
 	public int _getContentsLength() { return store.getContents().length; }
 
 	/**


@@ -21,7 +21,7 @@ package org.apache.poi.hdgf.streams;
  *  handles de-compressing it as required.
  * In future, may also handle writing it back out again
  */
-class StreamStore {
+public class StreamStore {
 	private byte[] contents;
 
 	/**
@@ -32,5 +32,14 @@ class StreamStore {
 		System.arraycopy(data, offset, contents, 0, length);
 	}
 
+	protected void prependContentsWith(byte[] b) {
+		byte[] newContents = new byte[contents.length + b.length];
+		System.arraycopy(b, 0, newContents, 0, b.length);
+		System.arraycopy(contents, 0, newContents, b.length, contents.length);
+		contents = newContents;
+	}
+	protected void copyBlockHeaderToContents() {}
+
 	protected byte[] getContents() { return contents; }
+	public byte[] _getContents() { return contents; }
 }
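
Taken together with CompressedStreamStore above, the intent is that ChunkStream can call copyBlockHeaderToContents() on whatever store it was handed: the plain StreamStore treats it as a no-op, while the compressed store prepends its 4-byte block header exactly once, guarded by blockHeaderInContents. Purely as an illustration of what prependContentsWith() does, a sketch with throwaway values:

// Illustration only: the array prepend that prependContentsWith() performs
byte[] blockHeader = { 1, 2, 3, 4 };
byte[] contents    = { 10, 11, 12 };

byte[] newContents = new byte[contents.length + blockHeader.length];
System.arraycopy(blockHeader, 0, newContents, 0, blockHeader.length);
System.arraycopy(contents, 0, newContents, blockHeader.length, contents.length);
// newContents now holds { 1, 2, 3, 4, 10, 11, 12 }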


@@ -0,0 +1,160 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hdgf.chunks;
import junit.framework.TestCase;
public class TestChunks extends TestCase {
public static final byte[] data_a = new byte[] { 70, 0, 0, 0,
-1, -1, -1, -1, 2, 0, 0, 0, 68, 0, 0, 0, 0, 0, 0, 68, 0, 0, 0, 0, 0,
0, 0, 2, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 0, 0, 0, -1, -1, -1, -1, 0, 0, 0,
0, -1, -1, -1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 104, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0,
0, 36, 0, 0, 0, 1, 0, 84, 24, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 2, 0, 0, 0, 0, 0, 0, 0, -110, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-124, 0, 0, 0, 2, 0, 85, 73, 0, 0, 0, 0, 0, 0, -56, 63, 73, 0, 0, 0,
0, 0, 0, -64, 63, 63, 0, 0, 0, 0, 0, 0, -64, 63, 63, 0, 0, 0, 0, 0, 0,
-64, -65, 73, 0, 0, 0, 0, 0, 0, -16, 63, 73, 0, 0, 0, 0, 0, 0, -16, 63,
4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 80,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -16, 63, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1, -1, 3, 0, 0, 32, 0, 0, 0, 0, 0, -73, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
79, 0, 0, 0, 2, 0, 85, 32, 32, 64, 0, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0,
0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0,
8, 8, 65, 0, 0, 0, 0, 0, 0, 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0,
0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 1, -13, 15, 0, 0, 0, 0,
-56, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 72, 0, 0, 0, 2, 0, 85, 63, 0, 0,
0, 0, 0, 0, -48, 63, 63, 0, 0, 0, 0, 0, 0, -48, 63, 63, 0, 0, 0, 0, 0,
0, -48, 63, 63, 0, 0, 0, 0, 0, 0, -48, 63, 0, 0, 0, 0, 0, 0, -16, 63,
0, 0, 0, 0, 0, 0, -16, 63, 1, 0, 1, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0,
0, 1, -1, 15, 7, 0, 0, 0, 0, 101, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 28,
0, 0, 0, 1, 0, 84, 24, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 0, 0, 2, 0, 85, 5, 0, 0,
0, 72, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
public static final byte[] data_b = new byte[] { 70, 0, 0, 0,
-1, -1, -1, -1, 3, 0, 0, 0, 68, 0, 0, 0, 0, 0, 0, 68, 0, 0, 0, 0, 0,
0, 0, 2, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 0, 0, 0, -1, -1, -1, -1,
0, 0, 0, 0, -1, -1, -1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 104, 0, 0, 0, 0, 0, 0,
0, 2, 0, 0, 0, 32, 0, 0, 0, 1, 0, 84, 24, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0, -110, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -124,
0, 0, 0, 2, 0, 85, 63, 0, 0, 0, 0, 0, 0, 33, 64, 63, 0, 0, 0, 0, 0, 0,
38, 64, 63, 0, 0, 0, 0, 0, 0, -64, 63, 63, 0, 0, 0, 0, 0, 0, -64, -65,
73, 0, 0, 0, 0, 0, 0, -16, 63, 73, 0, 0, 0, 0, 0, 0, -16, 63, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 80, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -16, 63, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 3,
0, 4, 32, 0, 0, 0, 0, 0, -56, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 72, 0, 0,
0, 2, 0, 85, 63, 0, 0, 0, 0, 0, 0, -48, 63, 63, 0, 0, 0, 0, 0, 0, -48,
63, 63, 0, 0, 0, 0, 0, 0, -48, 63, 63, 0, 0, 0, 0, 0, 0, -48, 63, 0, 0,
0, 0, 0, 0, -16, 63, 0, 0, 0, 0, 0, 0, -16, 63, 1, 0, 1, 0, 0, 1, 1, 0,
7, 0, 0, 0, 0, 0, 0, 0, 1, -1, 15, 7, 0, 0, 0, 0, 101, 0, 0, 0, 1, 0, 0,
0, 1, 0, 0, 0, 28, 0, 0, 0, 1, 0, 84, 24, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, -125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 0, 0, 2, 0,
85, 5, 0, 0, 0, 78, 0, 0, 0, 0, 0, 0, 0, 0, 0, -55, 0, 0, 0, 2, 0, 0, 0,
0, 0, 0, 0, -122, 0, 0, 0, 1, 0, 80, 1, 0, 0, 0, 60, 0, 0, 0, 60, 0, 0,
0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0
};
	public void testChunkHeaderA() throws Exception {
		ChunkFactory cf = new ChunkFactory(11);
		ChunkHeader h =
			ChunkHeader.createChunkHeader(11, data_a, 0);
		assertTrue(h instanceof ChunkHeaderV11);
		ChunkHeaderV11 header = (ChunkHeaderV11)h;

		assertEquals(70, header.getType());
		assertEquals(-1, header.getId());
		assertEquals(2, header.getUnknown1());
		assertEquals(68, header.getLength());
		assertEquals(0, header.getUnknown2());
		assertEquals(0, header.getUnknown3());

		assertTrue(header.hasTrailer());
		assertTrue(header.hasSeparator());
	}

	public void testChunkHeaderB() throws Exception {
		ChunkFactory cf = new ChunkFactory(11);
		ChunkHeader h =
			ChunkHeader.createChunkHeader(11, data_b, 0);
		assertTrue(h instanceof ChunkHeaderV11);
		ChunkHeaderV11 header = (ChunkHeaderV11)h;

		assertEquals(70, header.getType());
		assertEquals(-1, header.getId());
		assertEquals(3, header.getUnknown1());
		assertEquals(68, header.getLength());
		assertEquals(0, header.getUnknown2());
		assertEquals(0, header.getUnknown3());

		assertTrue(header.hasTrailer());
		assertTrue(header.hasSeparator());
	}

	public void testOneChunk() throws Exception {
		ChunkFactory cf = new ChunkFactory(11);
		cf.createChunk(data_a, 0);
		cf.createChunk(data_b, 0);

		Chunk chunk = cf.createChunk(data_a, 0);
		assertNotNull(chunk.getHeader());
		assertNotNull(chunk.getTrailer());
		assertNotNull(chunk.getSeparator());

		// Should be 19 + length + 8 + 4 big
		assertEquals(68, chunk.getHeader().getLength());
		assertEquals(68+19+8+4, chunk.getOnDiskSize());
	}

	public void testManyChunks() throws Exception {
		ChunkFactory cf = new ChunkFactory(11);
		Chunk chunk;
		int offset = 0;

		chunk = cf.createChunk(data_a, offset);
		assertNotNull(chunk.getHeader());
		assertNotNull(chunk.getTrailer());
		assertNotNull(chunk.getSeparator());
		offset += chunk.getOnDiskSize();

		chunk = cf.createChunk(data_a, offset);
		assertNotNull(chunk.getHeader());
		assertNotNull(chunk.getTrailer());
		assertNotNull(chunk.getSeparator());
		offset += chunk.getOnDiskSize();

		chunk = cf.createChunk(data_a, offset);
		assertNotNull(chunk.getHeader());
		assertNull(chunk.getTrailer());
		assertNull(chunk.getSeparator());
		offset += chunk.getOnDiskSize();
	}
}


@@ -73,7 +73,23 @@ public class TestStreamComplex extends StreamTest {
 	}
 
 	public void testChunks() {
+		Pointer trailerPtr = ptrFactory.createPointer(contents, trailerPointerAt);
+		TrailerStream ts = (TrailerStream)
+			Stream.createStream(trailerPtr, contents, chunkFactory, ptrFactory);
+
+		// Should be 7th one
+		Pointer chunkPtr = ts.getChildPointers()[5];
+		assertFalse(chunkPtr.destinationHasStrings());
+		assertTrue(chunkPtr.destinationHasChunks());
+		assertFalse(chunkPtr.destinationHasPointers());
+
+		Stream stream = Stream.createStream(chunkPtr, contents, chunkFactory, ptrFactory);
+		assertNotNull(stream);
+		assertTrue(stream instanceof ChunkStream);
+
+		// Now find the chunks within it
+		ChunkStream cs = (ChunkStream)stream;
+		cs.findChunks();
 	}
 
 	public void testStrings() {