re-arranging the getRecordSize() and getDataSize() methods in the Record / StandardRecord / ContinuableRecord hierarchy

git-svn-id: https://svn.apache.org/repos/asf/poi/trunk@723161 13f79535-47bb-0310-9956-ffa450edef68
Josh Micich 2008-12-04 00:08:45 +00:00
parent 2ebe3f7e32
commit 8be658a251
11 changed files with 140 additions and 185 deletions
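
Taken together, the hunks below move all knowledge of the 4-byte sid/length header out of the Record base class and into the subclasses that actually know their on-disk layout. A condensed sketch of where the size logic ends up after this commit (assembled from the hunks that follow, not copied verbatim from any one file; the abstract declarations live higher up, on RecordBase):

    // Record itself no longer does any size arithmetic; subclasses must supply
    // getRecordSize() (and serialize(int, byte[])) themselves.
    public abstract class Record extends RecordBase {
        public abstract short getSid();
        // getRecordSize() / serialize(int, byte[]) stay abstract, inherited from RecordBase
    }

    // StandardRecord keeps the old "header + payload" arithmetic for simple records:
    // a subclass reports only its payload size.
    public abstract class StandardRecord extends Record {
        protected abstract int getDataSize();
        public final int getRecordSize() {
            return 4 + getDataSize();   // 2 bytes sid + 2 bytes length + payload
        }
    }

    // Records that do not fit that mould (AbstractEscherHolderRecord, ObjRecord,
    // EscherAggregate, LabelRecord, the dummy records, ContinuableRecord, ...) now
    // override getRecordSize() directly and report their full encoded size.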


@@ -34,7 +34,7 @@ abstract class DummyRecordBase extends Record {
public int serialize(int offset, byte[] data) {
throw new RecordFormatException("Cannot serialize a dummy record");
}
protected final int getDataSize() {
public final int getRecordSize() {
throw new RecordFormatException("Cannot serialize a dummy record");
}
}


@@ -130,7 +130,8 @@ public abstract class AbstractEscherHolderRecord extends Record {
}
return getRecordSize();
}
protected int getDataSize() {
public int getRecordSize() {
if (escherRecords.size() == 0 && rawData != null) {
return rawData.length;
}


@@ -17,7 +17,9 @@
package org.apache.poi.hssf.record;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianOutput;
/**
* Title: Continue Record(0x003C) - Helper class used primarily for SST Records <P>
@@ -27,7 +29,7 @@ import org.apache.poi.util.LittleEndian;
* @author Andrew C. Oliver (acoliver at apache dot org)
* @author Csaba Nagy (ncsaba at yahoo dot com)
*/
public final class ContinueRecord extends Record {
public final class ContinueRecord extends StandardRecord {
public final static short sid = 0x003C;
private byte[] _data;
@@ -39,70 +41,36 @@ public final class ContinueRecord extends Record {
return _data.length;
}
public int serialize(int offset, byte[] data) {
return write(data, offset, null, _data);
public void serialize(LittleEndianOutput out) {
out.write(_data);
}
/**
* get the data for continuation
* @return byte array containing all of the continued data
*/
public byte [] getData()
{
public byte[] getData() {
return _data;
}
public String toString()
{
public String toString() {
StringBuffer buffer = new StringBuffer();
buffer.append("[CONTINUE RECORD]\n");
buffer.append(" .id = ").append(Integer.toHexString(sid))
.append("\n");
buffer.append(" .data = ").append(HexDump.toHex(_data)).append("\n");
buffer.append("[/CONTINUE RECORD]\n");
return buffer.toString();
}
public short getSid()
{
public short getSid() {
return sid;
}
/**
* Fill the fields. Only thing is, this record has no fields --
*
* @param in the RecordInputstream to read the record from
*/
public ContinueRecord(RecordInputStream in)
{
_data = in.readRemainder();
public ContinueRecord(RecordInputStream in) {
_data = in.readRemainder();
}
public Object clone() {
return new ContinueRecord(_data);
}
/**
* Writes the full encoding of a Continue record without making an instance
*/
public static int write(byte[] destBuf, int destOffset, Byte initialDataByte, byte[] srcData) {
return write(destBuf, destOffset, initialDataByte, srcData, 0, srcData.length);
}
/**
* @param initialDataByte (optional - often used for unicode flag).
* If supplied, this will be written before srcData
* @return the total number of bytes written
*/
public static int write(byte[] destBuf, int destOffset, Byte initialDataByte, byte[] srcData, int srcOffset, int len) {
int totalLen = len + (initialDataByte == null ? 0 : 1);
LittleEndian.putUShort(destBuf, destOffset, sid);
LittleEndian.putUShort(destBuf, destOffset + 2, totalLen);
int pos = destOffset + 4;
if (initialDataByte != null) {
LittleEndian.putByte(destBuf, pos, initialDataByte.byteValue());
pos += 1;
}
System.arraycopy(srcData, srcOffset, destBuf, pos, len);
return 4 + totalLen;
return new ContinueRecord(_data);
}
}
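
Because removed and added lines are interleaved above, the reshaped class is easier to read in one piece. Roughly, after the change (reassembled from the hunk; the byte[] constructor is implied by clone() rather than shown above):

    public final class ContinueRecord extends StandardRecord {
        public final static short sid = 0x003C;
        private byte[] _data;

        public ContinueRecord(byte[] data) {   // not in the hunk, but implied by clone()
            _data = data;
        }

        public ContinueRecord(RecordInputStream in) {
            _data = in.readRemainder();
        }

        protected int getDataSize() {
            return _data.length;
        }

        public void serialize(LittleEndianOutput out) {
            out.write(_data);                  // sid/length header is written by StandardRecord
        }

        public byte[] getData() {
            return _data;
        }

        public short getSid() {
            return sid;
        }

        public Object clone() {
            return new ContinueRecord(_data);
        }
    }

The static write(...) helpers are dropped entirely, presumably because the record-splitting they supported is handled by ContinuableRecordOutput (see the ContinuableRecord hunk further down).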


@@ -81,9 +81,10 @@ public final class DrawingGroupRecord extends AbstractEscherHolderRecord {
public void processChildRecords() {
convertRawBytesToEscherRecords();
}
protected int getDataSize() {
// TODO - convert this to a RecordAggregate
return grossSizeFromDataSize( getRawDataSize() ) - 4;
public int getRecordSize() {
// TODO - convert this to a RecordAggregate
return grossSizeFromDataSize(getRawDataSize());
}
private int getRawDataSize() {
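
grossSizeFromDataSize() accounts for a drawing group whose data is too big for one record: it is written out as several consecutive records, each chunk getting its own 4-byte header. A hypothetical worked example (the 8224-byte cap on a record's data is the usual BIFF limit; the constant itself is not visible in this hunk):

    // raw escher data:            20,000 bytes
    // records needed:             ceil(20,000 / 8,224) = 3
    // grossSizeFromDataSize(...): 20,000 + 3 * 4      = 20,012 bytes on disk
    //
    // old code: getDataSize() returned 20,012 - 4 = 20,008 and Record added the 4 back
    // new code: getRecordSize() returns 20,012 directly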


@@ -69,9 +69,8 @@ import org.apache.poi.util.POILogger;
*
* @author Glen Stampoultzis (glens at apache.org)
*/
public class EscherAggregate extends AbstractEscherHolderRecord
{
public static final short sid = 9876;
public final class EscherAggregate extends AbstractEscherHolderRecord {
public static final short sid = 9876; // not a real sid - dummy value
private static POILogger log = POILogFactory.getLogger(EscherAggregate.class);
public static final short ST_MIN = (short) 0;
@@ -498,8 +497,8 @@ public class EscherAggregate extends AbstractEscherHolderRecord
return size;
}
protected int getDataSize() {
// TODO - convert this to RecordAggregate
public int getRecordSize() {
// TODO - convert this to RecordAggregate
convertUserModelToRecords();
List records = getEscherRecords();
int rawEscherSize = getEscherRecordSize( records );
@@ -516,7 +515,7 @@ public class EscherAggregate extends AbstractEscherHolderRecord
Record r = (Record) iterator.next();
tailRecordSize += r.getRecordSize();
}
return drawingRecordSize + objRecordSize + tailRecordSize - 4;
return drawingRecordSize + objRecordSize + tailRecordSize;
}
/**


@@ -117,7 +117,7 @@ public final class LabelRecord extends Record implements CellValueRecordInterface
public int serialize(int offset, byte [] data) {
throw new RecordFormatException("Label Records are supported READ ONLY...convert to LabelSST");
}
protected int getDataSize() {
public int getRecordSize() {
throw new RecordFormatException("Label Records are supported READ ONLY...convert to LabelSST");
}


@@ -119,9 +119,9 @@ public final class ObjRecord extends Record {
return sb.toString();
}
protected int getDataSize() {
public int getRecordSize() {
if (_uninterpretedData != null) {
return _uninterpretedData.length;
return _uninterpretedData.length + 4;
}
int size = 0;
for (int i=subrecords.size()-1; i>=0; i--) {
@@ -137,7 +137,7 @@ public final class ObjRecord extends Record {
size++;
}
}
return size;
return size + 4;
}
public int serialize(int offset, byte[] data) {
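
The two +4 terms are the flip side of the same move: ObjRecord used to report only its payload (the encoded sub-records, plus any padding) and let Record.getRecordSize() add the header, whereas now it reports the full on-disk size itself. With a hypothetical payload of 32 bytes:

    // old scheme: getDataSize()   == 32, Record.getRecordSize() == 4 + 32 == 36
    // new scheme: getRecordSize() == 32 + 4 == 36   (same result, computed in one place)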


@@ -21,89 +21,76 @@ import java.io.ByteArrayInputStream;
/**
* Title: Record
* Description: All HSSF Records inherit from this class. It
* populates the fields common to all records (id, size and data).
* Subclasses should be sure to validate the id,
* Company:
* Description: All HSSF Records inherit from this class.
* @author Andrew C. Oliver
* @author Marc Johnson (mjohnson at apache dot org)
* @author Jason Height (jheight at chariot dot net dot au)
*/
public abstract class Record extends RecordBase {
/**
* instantiates a blank record strictly for ID matching
*/
protected Record() {
// no fields to initialise
}
protected Record()
{
}
/**
* called by the class that is responsible for writing this sucker.
* Subclasses should implement this so that their data is passed back in a
* byte array.
*
* @return byte array containing instance data
*/
public final byte[] serialize() {
byte[] retval = new byte[ getRecordSize() ];
/**
* called by the class that is responsible for writing this sucker.
* Subclasses should implement this so that their data is passed back in a
* byte array.
*
* @return byte array containing instance data
*/
serialize(0, retval);
return retval;
}
public final byte[] serialize() {
byte[] retval = new byte[ getRecordSize() ];
/**
* get a string representation of the record (for biffview/debugging)
*/
public String toString() {
return super.toString();
}
serialize(0, retval);
return retval;
}
/**
* return the non static version of the id for this record.
*/
public final int getRecordSize() {
return 4 + getDataSize();
}
/**
* @return the size of the data portion of this record
* (does not include initial 4 bytes for sid and size)
*/
protected abstract int getDataSize();
/**
* get a string representation of the record (for biffview/debugging)
*/
public String toString()
{
return super.toString();
}
public abstract short getSid();
/**
* return the non static version of the id for this record.
*/
public Object clone() {
if (false) {
// TODO - implement clone in a more standardised way
try {
return super.clone();
} catch (CloneNotSupportedException e) {
throw new RuntimeException(e);
}
}
throw new RuntimeException("The class "+getClass().getName()+" needs to define a clone method");
}
public abstract short getSid();
/**
* Clone the current record, via a call to serialize
* it, and another to create a new record from the
* bytes.
* May only be used for classes which don't have
* internal counts / ids in them. For those which
* do, a full model-aware cloning is needed, which
* allocates new ids / counts as needed.
*/
public Record cloneViaReserialise() {
// Do it via a re-serialization
// It's a cheat, but it works...
byte[] b = serialize();
RecordInputStream rinp = new RecordInputStream(new ByteArrayInputStream(b));
rinp.nextRecord();
public Object clone() {
throw new RuntimeException("The class "+getClass().getName()+" needs to define a clone method");
}
/**
* Clone the current record, via a call to serialise
* it, and another to create a new record from the
* bytes.
* May only be used for classes which don't have
* internal counts / ids in them. For those which
* do, a full record-aware serialise is needed, which
* allocates new ids / counts as needed.
*/
public Record cloneViaReserialise()
{
// Do it via a re-serialise
// It's a cheat, but it works...
byte[] b = serialize();
RecordInputStream rinp = new RecordInputStream(
new ByteArrayInputStream(b)
);
rinp.nextRecord();
Record[] r = RecordFactory.createRecord(rinp);
if(r.length != 1) {
throw new IllegalStateException("Re-serialised a record to clone it, but got " + r.length + " records back!");
}
return r[0];
}
Record[] r = RecordFactory.createRecord(rinp);
if(r.length != 1) {
throw new IllegalStateException("Re-serialised a record to clone it, but got " + r.length + " records back!");
}
return r[0];
}
}
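
As with ContinueRecord, the end state is easier to see untangled. After this commit the Record base class is reduced to roughly the following (reassembled from the hunk above; the experimental if (false) clone code and both size methods are simply gone):

    public abstract class Record extends RecordBase {

        /**
         * instantiates a blank record strictly for ID matching
         */
        protected Record() {
            // no fields to initialise
        }

        /**
         * called by the class that is responsible for writing this sucker.
         * Subclasses should implement this so that their data is passed back in a
         * byte array.
         */
        public final byte[] serialize() {
            byte[] retval = new byte[getRecordSize()];
            serialize(0, retval);
            return retval;
        }

        public String toString() {
            return super.toString();
        }

        public abstract short getSid();

        public Object clone() {
            throw new RuntimeException("The class " + getClass().getName()
                    + " needs to define a clone method");
        }

        /**
         * Clone the current record via a call to serialize it, and another to
         * create a new record from the bytes.  May only be used for classes
         * which don't have internal counts / ids in them.
         */
        public Record cloneViaReserialise() {
            // Do it via a re-serialization; it's a cheat, but it works...
            byte[] b = serialize();
            RecordInputStream rinp = new RecordInputStream(new ByteArrayInputStream(b));
            rinp.nextRecord();
            Record[] r = RecordFactory.createRecord(rinp);
            if (r.length != 1) {
                throw new IllegalStateException("Re-serialised a record to clone it, but got "
                        + r.length + " records back!");
            }
            return r[0];
        }
    }

The final getRecordSize() and the abstract getDataSize() that used to live here have moved; the next two hunks (StandardRecord and ContinuableRecord) show where.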


@@ -27,6 +27,10 @@ import org.apache.poi.util.LittleEndianOutput;
* @author Josh Micich
*/
public abstract class StandardRecord extends Record {
protected abstract int getDataSize();
public final int getRecordSize() {
return 4 + getDataSize();
}
@Override
public final int serialize(int offset, byte[] data) {
int dataSize = getDataSize();
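
The hunk stops after the first line of serialize(int, byte[]). The rest is not shown here, but its shape follows from the two methods above: write the sid and the payload length, then hand a LittleEndianOutput over to the subclass. A sketch of that shape (an approximation, not the verbatim POI source):

    public final int serialize(int offset, byte[] data) {
        int dataSize = getDataSize();
        // wrap the destination array so the subclass can stream its payload into it
        LittleEndianOutput out = new LittleEndianByteArrayOutputStream(data, offset, dataSize + 4);
        out.writeShort(getSid());     // 2-byte record id
        out.writeShort(dataSize);     // 2-byte payload length
        serialize(out);               // subclass writes exactly dataSize bytes
        return dataSize + 4;
    }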


@@ -47,15 +47,15 @@ public abstract class ContinuableRecord extends Record {
/**
* @return four less than the total length of the encoded record(s)
* (in the case when no {@link ContinueRecord} is needed, this is the
* same ushort value that gets encoded after the record sid
* @return the total length of the encoded record(s)
* (Note - if any {@link ContinueRecord} is required, this result includes the
* size of those too)
*/
protected final int getDataSize() {
public final int getRecordSize() {
ContinuableRecordOutput out = ContinuableRecordOutput.createForCountingOnly();
serialize(out);
out.terminate();
return out.getTotalSize() - 4;
return out.getTotalSize();
}
public final int serialize(int offset, byte[] data) {
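
For ContinuableRecord subclasses nothing changes in how they are authored: they implement only the streaming serialize method, and the base class derives the size by replaying that serialization into a counting-only output. Note the doc fix above: getDataSize() used to return "four less than the total", whereas getRecordSize() is now simply the total encoded length, ContinueRecord headers included. A minimal hypothetical subclass to show the contract (the class, its sid and its field are invented for illustration; SST and text-object records are the real users):

    final class DemoContinuableRecord extends ContinuableRecord {
        private final String _text;

        DemoContinuableRecord(String text) {
            _text = text;
        }
        public short getSid() {
            return (short) 0x0999;                 // made-up id, not a real BIFF sid
        }
        protected void serialize(ContinuableRecordOutput out) {
            out.writeString(_text);                // spills into ContinueRecords as needed
        }
        // No size code at all: serialize(int, byte[]) and getRecordSize() are final in the
        // base class, and getRecordSize() now already includes any ContinueRecord headers.
    }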


@@ -30,12 +30,13 @@ import org.apache.poi.hssf.model.HSSFFormulaParser;
import org.apache.poi.hssf.model.Sheet;
import org.apache.poi.hssf.record.NameRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.RecordBase;
import org.apache.poi.hssf.record.RecordFormatException;
import org.apache.poi.hssf.record.formula.Area3DPtg;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.TempFile;
/**
*
* Tests for {@link HSSFWorkbook}
*/
public final class TestHSSFWorkbook extends TestCase {
private static HSSFWorkbook openSample(String sampleFileName) {
@@ -75,31 +76,25 @@ public final class TestHSSFWorkbook extends TestCase {
b.createSheet("Sheet1");
b.createSheet();
b.createSheet("name1");
try
{
try {
b.createSheet("name1");
fail();
}
catch ( IllegalArgumentException pass )
{
} catch (IllegalArgumentException pass) {
// expected during successful test
}
b.createSheet();
try
{
b.setSheetName( 3, "name1" );
try {
b.setSheetName(3, "name1");
fail();
}
catch ( IllegalArgumentException pass )
{
} catch (IllegalArgumentException pass) {
// expected during successful test
}
try
{
b.setSheetName( 3, "name1" );
try {
b.setSheetName(3, "name1");
fail();
}
catch ( IllegalArgumentException pass )
{
} catch (IllegalArgumentException pass) {
// expected during successful test
}
b.setSheetName( 3, "name2" );
@@ -383,17 +378,17 @@ public final class TestHSSFWorkbook extends TestCase {
assertEquals("active", expectedActive, sheet.isActive());
assertEquals("selected", expectedSelected, sheet.isSelected());
}
/**
* If Sheet.getSize() returns a different result to Sheet.serialize(), this will cause the BOF
* records to be written with invalid offset indexes. Excel does not like this, and such
* records to be written with invalid offset indexes. Excel does not like this, and such
* errors are particularly hard to track down. This test ensures that HSSFWorkbook throws
* a specific exception as soon as the situation is detected. See bugzilla 45066
*/
public void testSheetSerializeSizeMismatch_bug45066() {
HSSFWorkbook wb = new HSSFWorkbook();
Sheet sheet = wb.createSheet("Sheet1").getSheet();
List sheetRecords = sheet.getRecords();
List<RecordBase> sheetRecords = sheet.getRecords();
// one way (of many) to cause the discrepancy is with a badly behaved record:
sheetRecords.add(new BadlyBehavedRecord());
// There is also much logic inside Sheet that (if buggy) might also cause the discrepancy
@@ -405,98 +400,98 @@ public final class TestHSSFWorkbook extends TestCase {
assertTrue(e.getMessage().startsWith("Actual serialized sheet size"));
}
}
/**
* Checks that us and HSSFName play nicely with named ranges
* that point to deleted sheets
*/
public void testNamesToDeleteSheets() throws Exception {
public void testNamesToDeleteSheets() {
HSSFWorkbook b = openSample("30978-deleted.xls");
assertEquals(3, b.getNumberOfNames());
// Sheet 2 is deleted
assertEquals("Sheet1", b.getSheetName(0));
assertEquals("Sheet3", b.getSheetName(1));
Area3DPtg ptg;
NameRecord nr;
HSSFName n;
/* ======= Name pointing to deleted sheet ====== */
// First at low level
nr = b.getWorkbook().getNameRecord(0);
assertEquals("On2", nr.getNameText());
assertEquals(0, nr.getSheetNumber());
assertEquals(1, nr.getExternSheetNumber());
assertEquals(1, nr.getNameDefinition().length);
ptg = (Area3DPtg)nr.getNameDefinition()[0];
assertEquals(1, ptg.getExternSheetIndex());
assertEquals(0, ptg.getFirstColumn());
assertEquals(0, ptg.getFirstRow());
assertEquals(0, ptg.getLastColumn());
assertEquals(2, ptg.getLastRow());
// Now at high level
n = b.getNameAt(0);
assertEquals("On2", n.getNameName());
assertEquals("", n.getSheetName());
assertEquals("#REF!$A$1:$A$3", n.getReference());
assertEquals("#REF!$A$1:$A$3", n.getRefersToFormula());
/* ======= Name pointing to 1st sheet ====== */
// First at low level
nr = b.getWorkbook().getNameRecord(1);
assertEquals("OnOne", nr.getNameText());
assertEquals(0, nr.getSheetNumber());
assertEquals(0, nr.getExternSheetNumber());
assertEquals(1, nr.getNameDefinition().length);
ptg = (Area3DPtg)nr.getNameDefinition()[0];
assertEquals(0, ptg.getExternSheetIndex());
assertEquals(0, ptg.getFirstColumn());
assertEquals(2, ptg.getFirstRow());
assertEquals(0, ptg.getLastColumn());
assertEquals(3, ptg.getLastRow());
// Now at high level
n = b.getNameAt(1);
assertEquals("OnOne", n.getNameName());
assertEquals("Sheet1", n.getSheetName());
assertEquals("Sheet1!$A$3:$A$4", n.getReference());
assertEquals("Sheet1!$A$3:$A$4", n.getRefersToFormula());
/* ======= Name pointing to 3rd sheet ====== */
// First at low level
nr = b.getWorkbook().getNameRecord(2);
assertEquals("OnSheet3", nr.getNameText());
assertEquals(0, nr.getSheetNumber());
assertEquals(2, nr.getExternSheetNumber());
assertEquals(1, nr.getNameDefinition().length);
ptg = (Area3DPtg)nr.getNameDefinition()[0];
assertEquals(2, ptg.getExternSheetIndex());
assertEquals(0, ptg.getFirstColumn());
assertEquals(0, ptg.getFirstRow());
assertEquals(0, ptg.getLastColumn());
assertEquals(1, ptg.getLastRow());
// Now at high level
n = b.getNameAt(2);
assertEquals("OnSheet3", n.getNameName());
assertEquals("Sheet3", n.getSheetName());
assertEquals("Sheet3!$A$1:$A$2", n.getReference());
assertEquals("Sheet3!$A$1:$A$2", n.getRefersToFormula());
}
/**
* result returned by getRecordSize() differs from result returned by serialize()
*/
private static final class BadlyBehavedRecord extends Record {
public BadlyBehavedRecord() {
//
//
}
public short getSid() {
return 0x777;
@@ -504,11 +499,11 @@ public final class TestHSSFWorkbook extends TestCase {
public int serialize(int offset, byte[] data) {
return 4;
}
protected int getDataSize() {
return 4;
public int getRecordSize() {
return 8;
}
}
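
BadlyBehavedRecord now states its deliberate bug directly in terms of getRecordSize(): it advertises 8 bytes while serialize() reports writing only 4, which is exactly the mismatch the "Actual serialized sheet size" assertion above expects Sheet serialization to detect. Side by side:

    // the intentional inconsistency this test record provides
    public int serialize(int offset, byte[] data) { return 4; }  // claims to have written 4 bytes
    public int getRecordSize()                    { return 8; }  // but advertises 8
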
/**
* The sample file provided with bug 45582 seems to have one extra byte after the EOFRecord
*/
@@ -521,7 +516,7 @@ public final class TestHSSFWorkbook extends TestCase {
}
}
}
/**
* Test to make sure that NameRecord.getSheetNumber() is interpreted as a
* 1-based sheet tab index (not a 1-based extern sheet index)
@@ -535,12 +530,12 @@ public final class TestHSSFWorkbook extends TestCase {
nr = wb.getWorkbook().getNameRecord(2);
// TODO - render full row and full column refs properly
assertEquals("Sheet2!$A$1:$IV$1", HSSFFormulaParser.toFormulaString(wb, nr.getNameDefinition())); // 1:1
try {
wb.setRepeatingRowsAndColumns(3, 4, 5, 8, 11);
} catch (RuntimeException e) {
if (e.getMessage().equals("Builtin (7) already exists for sheet (4)")) {
// there was a problem in the code which locates the existing print titles name record
// there was a problem in the code which locates the existing print titles name record
throw new RuntimeException("Identified bug 45720b");
}
throw e;