Fix for bug 45778 - made ObjRecord read sub-record ftLbsData properly

git-svn-id: https://svn.apache.org/repos/asf/poi/trunk@707450 13f79535-47bb-0310-9956-ffa450edef68
Josh Micich 2008-10-23 19:08:42 +00:00
parent 6189c8d1fc
commit 9f1ad85e0e
24 changed files with 1152 additions and 554 deletions

View File

@ -37,6 +37,7 @@
<!-- Don't forget to update status.xml too! -->
<release version="3.5-beta4" date="2008-??-??">
<action dev="POI-DEVELOPERS" type="fix">45778 - fixed ObjRecord to read ftLbsData properly</action>
<action dev="POI-DEVELOPERS" type="fix">46053 - fixed evaluation cache dependency analysis when changing blank cells</action>
</release>
<release version="3.2-FINAL" date="2008-10-19">

View File

@ -34,6 +34,7 @@
<!-- Don't forget to update changes.xml too! -->
<changes>
<release version="3.5-beta4" date="2008-??-??">
<action dev="POI-DEVELOPERS" type="fix">45778 - fixed ObjRecord to read ftLbsData properly</action>
<action dev="POI-DEVELOPERS" type="fix">46053 - fixed evaluation cache dependency analysis when changing blank cells</action>
</release>
<release version="3.2-FINAL" date="2008-10-19">

View File

@ -20,7 +20,8 @@ package org.apache.poi.hssf.record;
import org.apache.poi.util.BitField;
import org.apache.poi.util.BitFieldFactory;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianInput;
import org.apache.poi.util.LittleEndianOutput;
/**
* The common object data record is used to store all common preferences for an excel object.<p/>
@ -80,8 +81,10 @@ public final class CommonObjectDataSubRecord extends SubRecord {
}
public CommonObjectDataSubRecord(RecordInputStream in)
{
public CommonObjectDataSubRecord(LittleEndianInput in, int size) {
if (size != 18) {
throw new RecordFormatException("Expected size 18 but got (" + size + ")");
}
field_1_objectType = in.readShort();
field_2_objectId = in.readShort();
field_3_option = in.readShort();
@ -128,26 +131,21 @@ public final class CommonObjectDataSubRecord extends SubRecord {
return buffer.toString();
}
public int serialize(int offset, byte[] data)
{
int pos = 0;
public void serialize(LittleEndianOutput out) {
LittleEndian.putShort(data, 0 + offset, sid);
LittleEndian.putShort(data, 2 + offset, (short)(getRecordSize() - 4));
out.writeShort(sid);
out.writeShort(getDataSize());
LittleEndian.putShort(data, 4 + offset + pos, field_1_objectType);
LittleEndian.putShort(data, 6 + offset + pos, field_2_objectId);
LittleEndian.putShort(data, 8 + offset + pos, field_3_option);
LittleEndian.putInt(data, 10 + offset + pos, field_4_reserved1);
LittleEndian.putInt(data, 14 + offset + pos, field_5_reserved2);
LittleEndian.putInt(data, 18 + offset + pos, field_6_reserved3);
return getRecordSize();
out.writeShort(field_1_objectType);
out.writeShort(field_2_objectId);
out.writeShort(field_3_option);
out.writeInt(field_4_reserved1);
out.writeInt(field_5_reserved2);
out.writeInt(field_6_reserved3);
}
public int getRecordSize()
{
return 4 + 2 + 2 + 2 + 4 + 4 + 4;
protected int getDataSize() {
return 2 + 2 + 2 + 4 + 4 + 4;
}
public short getSid()
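The new ftCmo constructor above validates a fixed 18-byte payload (2 + 2 + 2 + 4 + 4 + 4) and serialize() now writes the sid and data size through LittleEndianOutput before the fields. A minimal, self-contained sketch of that layout follows; it uses only the JDK rather than the classes from this commit, and the field names simply mirror the diff.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

/** Illustrative decode of the 18-byte ftCmo (CommonObjectData) payload described above. */
public final class FtCmoSketch {
    public static void main(String[] args) {
        // sub-record payload only; the 4-byte (sid, size) header has already been consumed
        byte[] payload = {
            0x12, 0x00,             // objectType: 0x12 = list box, same type as the unit-test data later in this commit
            0x01, 0x00,             // objectId
            0x01, 0x00,             // option flags (bit 0 = locked)
            0x00, 0x00, 0x00, 0x00, // reserved1
            0x00, 0x00, 0x00, 0x00, // reserved2
            0x00, 0x00, 0x00, 0x00, // reserved3
        };
        if (payload.length != 18) {
            throw new IllegalStateException("Expected size 18 but got (" + payload.length + ")");
        }
        ByteBuffer in = ByteBuffer.wrap(payload).order(ByteOrder.LITTLE_ENDIAN);
        short objectType = in.getShort();
        short objectId   = in.getShort();
        short option     = in.getShort();
        int reserved1    = in.getInt();
        int reserved2    = in.getInt();
        int reserved3    = in.getInt();
        boolean locked   = (option & 0x0001) != 0;
        System.out.println("objectType=" + objectType + " objectId=" + objectId + " locked=" + locked);
    }
}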

View File

@ -26,6 +26,8 @@ import org.apache.poi.hssf.record.formula.Ref3DPtg;
import org.apache.poi.hssf.record.formula.RefPtg;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianInput;
import org.apache.poi.util.LittleEndianOutput;
import org.apache.poi.util.StringUtil;
/**
@ -68,18 +70,23 @@ public final class EmbeddedObjectRefSubRecord extends SubRecord {
return sid;
}
public EmbeddedObjectRefSubRecord(RecordInputStream in) {
public EmbeddedObjectRefSubRecord(LittleEndianInput in, int size) {
// TODO use 'size' param
// Much guess-work going on here due to lack of any documentation.
// See similar source code in OOO:
// http://lxr.go-oo.org/source/sc/sc/source/filter/excel/xiescher.cxx
// 1223 void XclImpOleObj::ReadPictFmla( XclImpStream& rStrm, sal_uInt16 nRecSize )
int streamIdOffset = in.readShort(); // OOO calls this 'nFmlaLen'
int remaining = size - LittleEndian.SHORT_SIZE;
int dataLenAfterFormula = in.remaining() - streamIdOffset;
int dataLenAfterFormula = remaining - streamIdOffset;
int formulaSize = in.readUShort();
remaining -= LittleEndian.SHORT_SIZE;
field_1_unknown_int = in.readInt();
remaining -= LittleEndian.INT_SIZE;
byte[] formulaRawBytes = readRawData(in, formulaSize);
remaining -= formulaSize;
field_2_refPtg = readRefPtg(formulaRawBytes);
if (field_2_refPtg == null) {
// common case
@ -91,15 +98,18 @@ public final class EmbeddedObjectRefSubRecord extends SubRecord {
}
int stringByteCount;
if (in.remaining() >= dataLenAfterFormula + 3) {
if (remaining >= dataLenAfterFormula + 3) {
int tag = in.readByte();
remaining -= LittleEndian.BYTE_SIZE;
if (tag != 0x03) {
throw new RecordFormatException("Expected byte 0x03 here");
}
int nChars = in.readUShort();
remaining -= LittleEndian.SHORT_SIZE;
if (nChars > 0) {
// OOO: the 4th way Xcl stores a unicode string: not even a Grbit byte present if length 0
field_3_unicode_flag = ( in.readByte() & 0x01 ) != 0;
field_3_unicode_flag = ( in.readByte() & 0x01 ) != 0;
remaining -= LittleEndian.BYTE_SIZE;
if (field_3_unicode_flag) {
field_4_ole_classname = in.readUnicodeLEString(nChars);
stringByteCount = nChars * 2;
@ -115,28 +125,34 @@ public final class EmbeddedObjectRefSubRecord extends SubRecord {
field_4_ole_classname = null;
stringByteCount = 0;
}
remaining -= stringByteCount;
// Pad to next 2-byte boundary
if (((stringByteCount + formulaSize) % 2) != 0) {
int b = in.readByte();
remaining -= LittleEndian.BYTE_SIZE;
if (field_2_refPtg != null && field_4_ole_classname == null) {
field_4_unknownByte = new Byte((byte)b);
}
}
int nUnexpectedPadding = in.remaining() - dataLenAfterFormula;
int nUnexpectedPadding = remaining - dataLenAfterFormula;
if (nUnexpectedPadding > 0) {
System.err.println("Discarding " + nUnexpectedPadding + " unexpected padding bytes ");
readRawData(in, nUnexpectedPadding);
remaining-=nUnexpectedPadding;
}
// Fetch the stream ID
if (dataLenAfterFormula >= 4) {
field_5_stream_id = new Integer(in.readInt());
remaining -= LittleEndian.INT_SIZE;
} else {
field_5_stream_id = null;
}
field_6_unknown = in.readRemainder();
byte [] buf = new byte[remaining];
in.readFully(buf);
field_6_unknown = buf;
}
private static Ptg readRefPtg(byte[] formulaRawBytes) {
@ -146,17 +162,17 @@ public final class EmbeddedObjectRefSubRecord extends SubRecord {
System.arraycopy(formulaRawBytes, 0, data, 4, formulaRawBytes.length);
RecordInputStream in = new RecordInputStream(new ByteArrayInputStream(data));
in.nextRecord();
byte ptgSid = in.readByte();
byte ptgSid = in.readByte();
switch(ptgSid) {
case AreaPtg.sid: return new AreaPtg(in);
case Area3DPtg.sid: return new Area3DPtg(in);
case RefPtg.sid: return new RefPtg(in);
case RefPtg.sid: return new RefPtg(in);
case Ref3DPtg.sid: return new Ref3DPtg(in);
}
return null;
}
private static byte[] readRawData(RecordInputStream in, int size) {
private static byte[] readRawData(LittleEndianInput in, int size) {
if (size < 0) {
throw new IllegalArgumentException("Negative size (" + size + ")");
}
@ -202,32 +218,32 @@ public final class EmbeddedObjectRefSubRecord extends SubRecord {
}
return result + field_6_unknown.length;
}
private int getDataSize() {
protected int getDataSize() {
int formulaSize = field_2_refPtg == null ? field_2_unknownFormulaData.length : field_2_refPtg.getSize();
int idOffset = getStreamIDOffset(formulaSize);
return getDataSize(idOffset);
}
public int serialize(int base, byte[] data) {
public void serialize(LittleEndianOutput out) {
int formulaSize = field_2_refPtg == null ? field_2_unknownFormulaData.length : field_2_refPtg.getSize();
int idOffset = getStreamIDOffset(formulaSize);
int dataSize = getDataSize(idOffset);
LittleEndian.putUShort(data, base + 0, sid);
LittleEndian.putUShort(data, base + 2, dataSize);
out.writeShort(sid);
out.writeShort(dataSize);
LittleEndian.putUShort(data, base + 4, idOffset);
LittleEndian.putUShort(data, base + 6, formulaSize);
LittleEndian.putInt(data, base + 8, field_1_unknown_int);
out.writeShort(idOffset);
out.writeShort(formulaSize);
out.writeInt(field_1_unknown_int);
int pos = base+12;
int pos = 12;
if (field_2_refPtg == null) {
System.arraycopy(field_2_unknownFormulaData, 0, data, pos, field_2_unknownFormulaData.length);
out.write(field_2_unknownFormulaData);
} else {
field_2_refPtg.writeBytes(data, pos);
field_2_refPtg.write(out);
}
pos += formulaSize;
@ -236,45 +252,39 @@ public final class EmbeddedObjectRefSubRecord extends SubRecord {
// don't write 0x03, stringLen, flag, text
stringLen = 0;
} else {
LittleEndian.putByte(data, pos, 0x03);
pos += 1;
out.writeByte(0x03);
pos+=1;
stringLen = field_4_ole_classname.length();
LittleEndian.putUShort(data, pos, stringLen);
pos += 2;
LittleEndian.putByte(data, pos, field_3_unicode_flag ? 0x01 : 0x00);
pos += 1;
out.writeShort(stringLen);
pos+=2;
out.writeByte(field_3_unicode_flag ? 0x01 : 0x00);
pos+=1;
if (field_3_unicode_flag) {
StringUtil.putUnicodeLE(field_4_ole_classname, data, pos);
StringUtil.putUnicodeLE(field_4_ole_classname, out);
pos += stringLen * 2;
} else {
StringUtil.putCompressedUnicode(field_4_ole_classname, data, pos);
StringUtil.putCompressedUnicode(field_4_ole_classname, out);
pos += stringLen;
}
}
// pad to next 2-byte boundary (requires 0 or 1 bytes)
switch(idOffset - (pos - 6 - base)) { // 6 for 3 shorts: sid, dataSize, idOffset
switch(idOffset - (pos - 6)) { // 6 for 3 shorts: sid, dataSize, idOffset
case 1:
LittleEndian.putByte(data, pos, field_4_unknownByte == null ? 0x00 : field_4_unknownByte.intValue());
out.writeByte(field_4_unknownByte == null ? 0x00 : field_4_unknownByte.intValue());
pos ++;
case 0:
break;
default:
throw new IllegalStateException("Bad padding calculation (" + idOffset + ", " + (pos-base) + ")");
throw new IllegalStateException("Bad padding calculation (" + idOffset + ", " + pos + ")");
}
if (field_5_stream_id != null) {
LittleEndian.putInt(data, pos, field_5_stream_id.intValue());
out.writeInt(field_5_stream_id.intValue());
pos += 4;
}
System.arraycopy(field_6_unknown, 0, data, pos, field_6_unknown.length);
return 4 + dataSize;
}
public int getRecordSize() {
return 4 + getDataSize();
out.write(field_6_unknown);
}
/**
@ -297,6 +307,9 @@ public final class EmbeddedObjectRefSubRecord extends SubRecord {
return field_6_unknown;
}
public Object clone() {
return this; // TODO proper clone
}
public String toString() {
StringBuffer sb = new StringBuffer();
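The rewritten constructor above stops relying on in.remaining() and instead keeps an explicit 'remaining' counter seeded from the size parameter. The key quantities are dataLenAfterFormula = (size - 2) - streamIdOffset (a value of 4 or more means a stream ID is present at the end) and the single pad byte consumed when stringByteCount + formulaSize is odd. A small worked example of that arithmetic, with made-up numbers rather than data from a real file:

/** Illustrative numbers for the ftPictFmla bookkeeping above (not taken from a real workbook). */
public final class PictFmlaSizeSketch {
    public static void main(String[] args) {
        int size = 26;            // data size of the sub-record (ushort following the sid)
        int streamIdOffset = 20;  // first ushort of the payload ("nFmlaLen" in the OOO source)
        int formulaSize = 5;      // length of the encoded link Ptg (e.g. a plain RefPtg)
        int stringByteCount = 0;  // no OLE class-name string in this example

        // bytes expected after the link block; 4 or more means a stream id follows
        int dataLenAfterFormula = (size - 2) - streamIdOffset;
        boolean hasStreamId = dataLenAfterFormula >= 4;

        // the link block is padded out to a 2-byte boundary
        boolean padBytePresent = ((stringByteCount + formulaSize) % 2) != 0;

        System.out.println("dataLenAfterFormula=" + dataLenAfterFormula
                + " hasStreamId=" + hasStreamId
                + " padBytePresent=" + padBytePresent);
        // prints: dataLenAfterFormula=4 hasStreamId=true padBytePresent=true
    }
}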

View File

@ -17,16 +17,19 @@
package org.apache.poi.hssf.record;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianInput;
import org.apache.poi.util.LittleEndianOutput;
/**
* ftEnd (0x0000)<p/>
*
* The end data record is used to denote the end of the subrecords.<p/>
*
* @author Glen Stampoultzis (glens at apache.org)
*/
public final class EndSubRecord extends SubRecord {
public final static short sid = 0x00;
public final static short sid = 0x0000; // Note - zero sid is somewhat unusual (compared to plain Records)
private static final int ENCODED_SIZE = 0;
public EndSubRecord()
{
@ -35,9 +38,12 @@ public final class EndSubRecord extends SubRecord {
/**
* @param in unused (since this record has no data)
* @param size the declared sub-record data size (must be zero, since ftEnd has no data)
*/
public EndSubRecord(RecordInputStream in)
{
public EndSubRecord(LittleEndianInput in, int size) {
if ((size & 0xFF) != ENCODED_SIZE) { // mask out random crap in upper byte
throw new RecordFormatException("Unexpected size (" + size + ")");
}
}
public String toString()
@ -50,18 +56,13 @@ public final class EndSubRecord extends SubRecord {
return buffer.toString();
}
public int serialize(int offset, byte[] data)
{
LittleEndian.putShort(data, 0 + offset, sid);
LittleEndian.putShort(data, 2 + offset, (short)(getRecordSize() - 4));
return getRecordSize();
public void serialize(LittleEndianOutput out) {
out.writeShort(sid);
out.writeShort(ENCODED_SIZE);
}
public int getRecordSize()
{
return 4 ;
protected int getDataSize() {
return ENCODED_SIZE;
}
public short getSid()

View File

@ -18,26 +18,30 @@
package org.apache.poi.hssf.record;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianInput;
import org.apache.poi.util.LittleEndianOutput;
/**
* ftGmo (0x0006)<p/>
* The group marker record is used as a position holder for groups.
* @author Glen Stampoultzis (glens at apache.org)
*/
public class GroupMarkerSubRecord extends SubRecord {
public final static short sid = 0x0006;
public final class GroupMarkerSubRecord extends SubRecord {
public final static short sid = 0x0006;
private byte[] reserved = new byte[0]; // would really love to know what goes in here.
private static final byte[] EMPTY_BYTE_ARRAY = { };
public GroupMarkerSubRecord()
{
private byte[] reserved; // would really love to know what goes in here.
public GroupMarkerSubRecord() {
reserved = EMPTY_BYTE_ARRAY;
}
public GroupMarkerSubRecord(RecordInputStream in)
{
reserved = in.readRemainder();
public GroupMarkerSubRecord(LittleEndianInput in, int size) {
byte[] buf = new byte[size];
in.readFully(buf);
reserved = buf;
}
public String toString()
@ -51,18 +55,14 @@ public class GroupMarkerSubRecord extends SubRecord {
return buffer.toString();
}
public int serialize(int offset, byte[] data)
{
LittleEndian.putShort(data, 0 + offset, sid);
LittleEndian.putShort(data, 2 + offset, (short)(getRecordSize() - 4));
System.arraycopy(reserved, 0, data, offset + 4, getRecordSize() - 4);
return getRecordSize();
public void serialize(LittleEndianOutput out) {
out.writeShort(sid);
out.writeShort(reserved.length);
out.write(reserved);
}
public int getRecordSize()
{
return 4 + reserved.length;
protected int getDataSize() {
return reserved.length;
}
public short getSid()

View File

@ -17,10 +17,13 @@
package org.apache.poi.hssf.record;
import org.apache.poi.util.*;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndianInput;
import org.apache.poi.util.LittleEndianOutput;
/**
* Represents a NoteStructure (0xD) sub record.
* ftNts (0x000D)<p/>
* Represents a NoteStructure sub record.
*
* <p>
* The docs say nothing about it. The length of this record is always 26 bytes.
@ -28,45 +31,49 @@ import org.apache.poi.util.*;
*
* @author Yegor Kozlov
*/
public class NoteStructureSubRecord
extends SubRecord
{
public final static short sid = 0x0D;
public final class NoteStructureSubRecord extends SubRecord {
public final static short sid = 0x0D;
private static final int ENCODED_SIZE = 22;
private byte[] reserved;
/**
* Construct a new <code>NoteStructureSubRecord</code> and
* fill its data with the default values
* @param size
* @param in
*/
public NoteStructureSubRecord()
{
//all we know is that the length of <code>NoteStructureSubRecord</code> is always 22 bytes
reserved = new byte[22];
reserved = new byte[ENCODED_SIZE];
}
/**
* Read the record data from the supplied <code>RecordInputStream</code>
*/
public NoteStructureSubRecord(RecordInputStream in)
{
public NoteStructureSubRecord(LittleEndianInput in, int size) {
if (size != ENCODED_SIZE) {
throw new RecordFormatException("Unexpected size (" + size + ")");
}
//just grab the raw data
reserved = in.readRemainder();
byte[] buf = new byte[size];
in.readFully(buf);
reserved = buf;
}
/**
* Convert this record to string.
* Used by BiffViewer and other utulities.
* Used by BiffViewer and other utilities.
*/
public String toString()
{
StringBuffer buffer = new StringBuffer();
String nl = System.getProperty("line.separator");
buffer.append("[ftNts ]" + nl);
buffer.append(" size = ").append(getRecordSize()).append(nl);
buffer.append(" reserved = ").append(HexDump.toHex(reserved)).append(nl);
buffer.append("[/ftNts ]" + nl);
buffer.append("[ftNts ]").append("\n");
buffer.append(" size = ").append(getDataSize()).append("\n");
buffer.append(" reserved = ").append(HexDump.toHex(reserved)).append("\n");
buffer.append("[/ftNts ]").append("\n");
return buffer.toString();
}
@ -78,18 +85,14 @@ public class NoteStructureSubRecord
*
* @return size of the record
*/
public int serialize(int offset, byte[] data)
{
LittleEndian.putShort(data, 0 + offset, sid);
LittleEndian.putShort(data, 2 + offset, (short)(getRecordSize() - 4));
System.arraycopy(reserved, 0, data, offset + 4, getRecordSize() - 4);
return getRecordSize();
public void serialize(LittleEndianOutput out) {
out.writeShort(sid);
out.writeShort(reserved.length);
out.write(reserved);
}
public int getRecordSize()
{
return 4 + reserved.length;
protected int getDataSize() {
return reserved.length;
}
/**

View File

@ -18,150 +18,163 @@
package org.apache.poi.hssf.record;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianInputStream;
import org.apache.poi.util.LittleEndianOutput;
import org.apache.poi.util.LittleEndianOutputStream;
/**
* OBJRECORD (0x005D)<p/>
*
* The obj record is used to hold various graphic objects and controls.
*
* @author Glen Stampoultzis (glens at apache.org)
*/
public final class ObjRecord extends Record {
public final static short sid = 0x005D;
private List subrecords;
public final static short sid = 0x005D;
private List subrecords;
/** used when POI has no idea what is going on */
private byte[] _uninterpretedData;
//00000000 15 00 12 00 01 00 01 00 11 60 00 00 00 00 00 0D .........`......
//00000010 26 01 00 00 00 00 00 00 00 00 &.........
//00000000 15 00 12 00 01 00 01 00 11 60 00 00 00 00 00 0D .........`......
//00000010 26 01 00 00 00 00 00 00 00 00 &.........
public ObjRecord()
{
subrecords = new ArrayList(2);
// TODO - ensure 2 sub-records (ftCmo 15h, and ftEnd 00h) are always created
}
public ObjRecord() {
subrecords = new ArrayList(2);
// TODO - ensure 2 sub-records (ftCmo 15h, and ftEnd 00h) are always created
}
public ObjRecord(RecordInputStream in)
{
// TODO - problems with OBJ sub-records stream
// MS spec says first sub-records is always CommonObjectDataSubRecord, and last is
// always EndSubRecord. OOO spec does not mention ObjRecord(0x005D).
// Existing POI test data seems to violate that rule. Some test data seems to contain
// garbage, and a crash is only averted by stopping at what looks like the 'EndSubRecord'
subrecords = new ArrayList();
//Check if this can be continued, if so then the
//following won't work properly
int subSize = 0;
byte[] subRecordData = in.readRemainder();
RecordInputStream subRecStream = new RecordInputStream(new ByteArrayInputStream(subRecordData));
while(subRecStream.hasNextRecord()) {
subRecStream.nextRecord();
Record subRecord = SubRecord.createSubRecord(subRecStream);
subSize += subRecord.getRecordSize();
subrecords.add(subRecord);
if (subRecord instanceof EndSubRecord) {
break;
}
}
public ObjRecord(RecordInputStream in) {
// TODO - problems with OBJ sub-records stream
// MS spec says first sub-records is always CommonObjectDataSubRecord,
// and last is
// always EndSubRecord. OOO spec does not mention ObjRecord(0x005D).
// Existing POI test data seems to violate that rule. Some test data
// seems to contain
// garbage, and a crash is only averted by stopping at what looks like
// the 'EndSubRecord'
/**
* Add the EndSubRecord explicitly.
*
* TODO - the reason the EndSubRecord is always skipped is because its 'sid' is zero and
* that causes subRecStream.hasNextRecord() to return false.
* There may be more than the size of EndSubRecord left-over, if there is any padding
* after that record. The content of the EndSubRecord and the padding is all zeros.
* So there's not much to look at past the last substantial record.
*
* See Bugs 41242/45133 for details.
*/
if (subRecordData.length - subSize >= 4) {
subrecords.add(new EndSubRecord());
}
}
// Check if this can be continued, if so then the
// following won't work properly
byte[] subRecordData = in.readRemainder();
if (LittleEndian.getUShort(subRecordData, 0) != CommonObjectDataSubRecord.sid) {
// seems to occur in just one junit on "OddStyleRecord.xls" (file created by CrystalReports)
// Excel tolerates the funny ObjRecord, and replaces it with a corrected version
// The exact logic/reasoning is not yet understood
_uninterpretedData = subRecordData;
return;
}
public String toString()
{
StringBuffer buffer = new StringBuffer();
// System.out.println(HexDump.toHex(subRecordData));
buffer.append("[OBJ]\n");
for ( Iterator iterator = subrecords.iterator(); iterator.hasNext(); )
{
Record record = (Record) iterator.next();
buffer.append("SUBRECORD: " + record.toString());
}
buffer.append("[/OBJ]\n");
return buffer.toString();
}
subrecords = new ArrayList();
ByteArrayInputStream bais = new ByteArrayInputStream(subRecordData);
LittleEndianInputStream subRecStream = new LittleEndianInputStream(bais);
while (true) {
SubRecord subRecord = SubRecord.createSubRecord(subRecStream);
subrecords.add(subRecord);
if (subRecord instanceof EndSubRecord) {
break;
}
}
if (bais.available() > 0) {
// earlier versions of the code had allowances for padding
// At present (Oct-2008), no unit test samples exhibit such padding
String msg = "Leftover " + bais.available()
+ " bytes in subrecord data " + HexDump.toHex(subRecordData);
throw new RecordFormatException(msg);
}
}
public int serialize(int offset, byte[] data)
{
int pos = 0;
public String toString() {
StringBuffer sb = new StringBuffer();
LittleEndian.putShort(data, 0 + offset, sid);
LittleEndian.putShort(data, 2 + offset, (short)(getRecordSize() - 4));
sb.append("[OBJ]\n");
for (int i = 0; i < subrecords.size(); i++) {
SubRecord record = (SubRecord) subrecords.get(i);
sb.append("SUBRECORD: ").append(record.toString());
}
sb.append("[/OBJ]\n");
return sb.toString();
}
private int getDataSize() {
if (_uninterpretedData != null) {
return _uninterpretedData.length;
}
int size = 0;
for (int i=subrecords.size()-1; i>=0; i--) {
SubRecord record = (SubRecord) subrecords.get(i);
size += record.getDataSize()+4;
}
return size;
}
pos = offset + 4;
for ( Iterator iterator = subrecords.iterator(); iterator.hasNext(); )
{
Record record = (Record) iterator.next();
pos += record.serialize(pos, data);
}
// assume padding (if present) does not need to be written.
// it is probably zero already, and it probably doesn't matter anyway
public int serialize(int offset, byte[] data) {
int dataSize = getDataSize();
return getRecordSize();
}
LittleEndian.putUShort(data, 0 + offset, sid);
LittleEndian.putUShort(data, 2 + offset, dataSize);
public int getRecordSize()
{
int size = 0;
for ( Iterator iterator = subrecords.iterator(); iterator.hasNext(); )
{
Record record = (Record) iterator.next();
size += record.getRecordSize();
}
int oddBytes = size & 0x03;
int padding = oddBytes == 0 ? 0 : 4 - oddBytes;
return 4 + size + padding;
}
byte[] subRecordBytes;
if (_uninterpretedData == null) {
ByteArrayOutputStream baos = new ByteArrayOutputStream(dataSize);
LittleEndianOutput leo = new LittleEndianOutputStream(baos);
public short getSid()
{
return sid;
}
for (int i = 0; i < subrecords.size(); i++) {
SubRecord record = (SubRecord) subrecords.get(i);
record.serialize(leo);
}
// padding
while (baos.size() < dataSize) {
baos.write(0);
}
subRecordBytes = baos.toByteArray();
} else {
subRecordBytes = _uninterpretedData;
}
System.arraycopy(subRecordBytes, 0, data, offset + 4, dataSize);
return 4 + dataSize;
}
public List getSubRecords()
{
return subrecords;
}
public int getRecordSize() {
return 4 + getDataSize();
}
public void clearSubRecords()
{
subrecords.clear();
}
public short getSid() {
return sid;
}
public void addSubRecord(int index, Object element)
{
subrecords.add( index, element );
}
public List getSubRecords() {
return subrecords;
}
public boolean addSubRecord(Object o)
{
return subrecords.add( o );
}
public void clearSubRecords() {
subrecords.clear();
}
public Object clone()
{
ObjRecord rec = new ObjRecord();
public void addSubRecord(int index, Object element) {
subrecords.add(index, element);
}
for ( Iterator iterator = subrecords.iterator(); iterator.hasNext(); )
rec.addSubRecord(( (Record) iterator.next() ).clone());
public boolean addSubRecord(Object o) {
return subrecords.add(o);
}
return rec;
}
public Object clone() {
ObjRecord rec = new ObjRecord();
for (int i = 0; i < subrecords.size(); i++) {
SubRecord record = (SubRecord) subrecords.get(i);
rec.addSubRecord(record.clone());
}
return rec;
}
}
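The reworked ObjRecord constructor is the structural half of the fix: the record body is now parsed as a flat list of (sid, length, payload) sub-records, the loop terminates at ftEnd, and any leftover bytes raise a RecordFormatException instead of being silently skipped; on the write side the sub-records are re-serialized and zero-padded up to the declared data size. A stripped-down, POI-independent sketch of that parse loop (illustrative only, and deliberately naive about the one sub-record whose length field lies; see the ftLbsData handling in SubRecord.java below):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.List;

/** Illustrative sketch of the sub-record loop used by ObjRecord above; not the POI implementation. */
public final class SubRecordLoopSketch {
    static final class Entry {
        final int sid;
        final byte[] data;
        Entry(int sid, byte[] data) { this.sid = sid; this.data = data; }
    }

    static List<Entry> parse(byte[] objRecordBody) {
        ByteBuffer in = ByteBuffer.wrap(objRecordBody).order(ByteOrder.LITTLE_ENDIAN);
        List<Entry> result = new ArrayList<Entry>();
        while (true) {
            int sid = in.getShort() & 0xFFFF;
            int len = in.getShort() & 0xFFFF; // usually the payload length - but NOT for ftLbsData (0x0013),
                                              // which is exactly why bug 45778 needed a dedicated sub-record class
            byte[] payload = new byte[len];
            in.get(payload);
            result.add(new Entry(sid, payload));
            if (sid == 0x0000) {              // ftEnd terminates the list
                break;
            }
        }
        if (in.remaining() > 0) {
            // mirrors the new "Leftover ... bytes in subrecord data" RecordFormatException
            throw new IllegalStateException("Leftover " + in.remaining() + " bytes in subrecord data");
        }
        return result;
    }

    public static void main(String[] args) {
        ByteBuffer out = ByteBuffer.allocate(26).order(ByteOrder.LITTLE_ENDIAN);
        out.putShort((short) 0x0015).putShort((short) 18); // ftCmo header
        out.put(new byte[18]);                             // zeroed ftCmo body, just for illustration
        out.putShort((short) 0x0000).putShort((short) 0);  // ftEnd
        List<Entry> entries = parse(out.array());
        System.out.println(entries.size() + " sub-records, last sid=0x"
                + Integer.toHexString(entries.get(entries.size() - 1).sid));
        // prints: 2 sub-records, last sid=0x0
    }
}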

View File

@ -18,6 +18,7 @@
package org.apache.poi.hssf.record;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianInput;
import java.io.IOException;
import java.io.InputStream;
@ -29,7 +30,7 @@ import java.io.ByteArrayOutputStream;
*
* @author Jason Height (jheight @ apache dot org)
*/
public final class RecordInputStream extends InputStream {
public final class RecordInputStream extends InputStream implements LittleEndianInput {
/** Maximum size of a single record (minus the 4 byte header) without a continue*/
public final static short MAX_RECORD_DATA_SIZE = 8224;
private static final int INVALID_SID_VALUE = -1;
@ -189,8 +190,8 @@ public final class RecordInputStream extends InputStream {
/**
* Reads an 8 bit, unsigned value
*/
public short readUByte() {
return (short) (readByte() & 0x00FF);
public int readUByte() {
return readByte() & 0x00FF;
}
/**
@ -217,6 +218,16 @@ public final class RecordInputStream extends InputStream {
pos += LittleEndian.DOUBLE_SIZE;
return result;
}
public void readFully(byte[] buf) {
readFully(buf, 0, buf.length);
}
public void readFully(byte[] buf, int off, int len) {
checkRecordPosition(len);
System.arraycopy(data, recordOffset, buf, off, len);
recordOffset+=len;
pos+=len;
}
public String readString() {
int requestedLength = readUShort();

View File

@ -17,52 +17,239 @@
package org.apache.poi.hssf.record;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.poi.hssf.record.formula.Area3DPtg;
import org.apache.poi.hssf.record.formula.AreaPtg;
import org.apache.poi.hssf.record.formula.Ptg;
import org.apache.poi.hssf.record.formula.Ref3DPtg;
import org.apache.poi.hssf.record.formula.RefPtg;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianInput;
import org.apache.poi.util.LittleEndianOutput;
import org.apache.poi.util.LittleEndianOutputStream;
/**
* Subrecords are part of the OBJ class.
*/
abstract public class SubRecord extends Record {
protected SubRecord() {
}
public abstract class SubRecord {
protected SubRecord() {
}
public static Record createSubRecord(RecordInputStream in)
{
Record r = null;
public static SubRecord createSubRecord(LittleEndianInput in) {
int sid = in.readUShort();
int secondUShort = in.readUShort(); // Often (but not always) the datasize for the sub-record
/* This must surely be an earlier hack?? Delete when confident
short adjustedSize = size;
if ( size < 0 )
{
adjustedSize = 0;
}
else if ( offset + size > data.length )
{
adjustedSize = (short) ( data.length - offset );
if ( adjustedSize > 4 )
{
adjustedSize -= 4;
}
}
*/
switch ( in.getSid() )
{
case CommonObjectDataSubRecord.sid:
r = new CommonObjectDataSubRecord( in );
break;
case EmbeddedObjectRefSubRecord.sid:
r = new EmbeddedObjectRefSubRecord( in );
break;
case GroupMarkerSubRecord.sid:
r = new GroupMarkerSubRecord( in );
break;
case EndSubRecord.sid:
r = new EndSubRecord( in );
break;
case NoteStructureSubRecord.sid:
r = new NoteStructureSubRecord( in );
break;
default:
r = new UnknownRecord( in );
}
return r;
}
switch (sid) {
case CommonObjectDataSubRecord.sid:
return new CommonObjectDataSubRecord(in, secondUShort);
case EmbeddedObjectRefSubRecord.sid:
return new EmbeddedObjectRefSubRecord(in, secondUShort);
case GroupMarkerSubRecord.sid:
return new GroupMarkerSubRecord(in, secondUShort);
case EndSubRecord.sid:
return new EndSubRecord(in, secondUShort);
case NoteStructureSubRecord.sid:
return new NoteStructureSubRecord(in, secondUShort);
case LbsDataSubRecord.sid:
return new LbsDataSubRecord(in, secondUShort);
}
return new UnknownSubRecord(in, sid, secondUShort);
}
/**
* @return the size of the data for this record (which is always 4 bytes less than the total
* record size). Note, however, that the ushort encoded after the record sid is usually but not
* always the data size.
*/
protected abstract int getDataSize();
public byte[] serialize() {
int size = getDataSize() + 4;
ByteArrayOutputStream baos = new ByteArrayOutputStream(size);
serialize(new LittleEndianOutputStream(baos));
if (baos.size() != size) {
throw new RuntimeException("write size mismatch");
}
return baos.toByteArray();
}
public abstract void serialize(LittleEndianOutput out);
public abstract Object clone();
private static final class UnknownSubRecord extends SubRecord {
private final int _sid;
private final byte[] _data;
public UnknownSubRecord(LittleEndianInput in, int sid, int size) {
_sid = sid;
byte[] buf = new byte[size];
in.readFully(buf);
_data = buf;
}
protected int getDataSize() {
return _data.length;
}
public void serialize(LittleEndianOutput out) {
out.writeShort(_sid);
out.writeShort(_data.length);
out.write(_data);
}
public Object clone() {
return this;
}
public String toString() {
StringBuffer sb = new StringBuffer(64);
sb.append(getClass().getName()).append(" [");
sb.append("sid=").append(HexDump.shortToHex(_sid));
sb.append(" size=").append(_data.length);
sb.append(" : ").append(HexDump.toHex(_data));
sb.append("]\n");
return sb.toString();
}
}
// TODO make into a top level class
// perhaps all SubRecord subclasses could go to their own package
private static final class LbsDataSubRecord extends SubRecord {
public static final int sid = 0x0013;
private int _unknownShort1;
private int _unknownInt4;
private Ptg _linkPtg;
private Byte _unknownByte6;
private int _nEntryCount;
private int _selectedEntryIndex;
private int _style;
private int _unknownShort10;
private int _comboStyle;
private int _lineCount;
private int _unknownShort13;
public LbsDataSubRecord(LittleEndianInput in, int unknownShort1) {
_unknownShort1 = unknownShort1;
int linkSize = in.readUShort();
if (linkSize > 0) {
int formulaSize = in.readUShort();
_unknownInt4 = in.readInt();
byte[] buf = new byte[formulaSize];
in.readFully(buf);
_linkPtg = readRefPtg(buf);
switch (linkSize - formulaSize - 6) {
case 1:
_unknownByte6 = new Byte(in.readByte());
break;
case 0:
_unknownByte6 = null;
break;
default:
throw new RecordFormatException("Unexpected leftover bytes");
}
} else {
_unknownInt4 = 0;
_linkPtg = null;
_unknownByte6 = null;
}
_nEntryCount = in.readUShort();
_selectedEntryIndex = in.readUShort();
_style = in.readUShort();
_unknownShort10 = in.readUShort();
_comboStyle = in.readUShort();
_lineCount = in.readUShort();
_unknownShort13 = in.readUShort();
}
protected int getDataSize() {
int result = 2; // 2 initial shorts
// optional link formula
if (_linkPtg != null) {
result += 2; // encoded Ptg size
result += 4; // unknown int
result += _linkPtg.getSize();
if (_unknownByte6 != null) {
result += 1;
}
}
result += 7 * 2; // 7 shorts
return result;
}
public void serialize(LittleEndianOutput out) {
out.writeShort(sid);
out.writeShort(_unknownShort1); // note - this is *not* the size
if (_linkPtg == null) {
out.writeShort(0);
} else {
int formulaSize = _linkPtg.getSize();
int linkSize = formulaSize + 6;
if (_unknownByte6 != null) {
linkSize++;
}
out.writeShort(linkSize);
out.writeShort(formulaSize);
out.writeInt(_unknownInt4);
_linkPtg.write(out);
if (_unknownByte6 != null) {
out.writeByte(_unknownByte6.intValue());
}
}
out.writeShort(_nEntryCount);
out.writeShort(_selectedEntryIndex);
out.writeShort(_style);
out.writeShort(_unknownShort10);
out.writeShort(_comboStyle);
out.writeShort(_lineCount);
out.writeShort(_unknownShort13);
}
private static Ptg readRefPtg(byte[] formulaRawBytes) {
byte[] data = new byte[formulaRawBytes.length + 4];
LittleEndian.putUShort(data, 0, -5555);
LittleEndian.putUShort(data, 2, formulaRawBytes.length);
System.arraycopy(formulaRawBytes, 0, data, 4, formulaRawBytes.length);
RecordInputStream in = new RecordInputStream(new ByteArrayInputStream(data));
in.nextRecord();
byte ptgSid = in.readByte();
switch(ptgSid) {
case AreaPtg.sid: return new AreaPtg(in);
case Area3DPtg.sid: return new Area3DPtg(in);
case RefPtg.sid: return new RefPtg(in);
case Ref3DPtg.sid: return new Ref3DPtg(in);
}
return null;
}
public Object clone() {
return this;
}
public String toString() {
StringBuffer sb = new StringBuffer(256);
sb.append("[ftLbsData]\n");
sb.append(" .unknownShort1 =").append(HexDump.shortToHex(_unknownShort1)).append("\n");
if (_linkPtg == null) {
sb.append(" <no link formula>\n");
} else {
sb.append(" .unknownInt4 =").append(HexDump.intToHex(_unknownInt4)).append("\n");
sb.append(" .linkPtg =").append(_linkPtg.toFormulaString()).append(" (").append(_linkPtg.getRVAType()).append(")").append("\n");
if (_unknownByte6 != null) {
sb.append(" .unknownByte6 =").append(HexDump.byteToHex(_unknownByte6.byteValue())).append("\n");
}
}
sb.append(" .nEntryCount =").append(HexDump.shortToHex(_nEntryCount)).append("\n");
sb.append(" .selEntryIx =").append(HexDump.shortToHex(_selectedEntryIndex)).append("\n");
sb.append(" .style =").append(HexDump.shortToHex(_style)).append("\n");
sb.append(" .unknownShort10=").append(HexDump.shortToHex(_unknownShort10)).append("\n");
sb.append(" .comboStyle =").append(HexDump.shortToHex(_comboStyle)).append("\n");
sb.append(" .lineCount =").append(HexDump.shortToHex(_lineCount)).append("\n");
sb.append(" .unknownShort13=").append(HexDump.shortToHex(_unknownShort13)).append("\n");
sb.append("[/ftLbsData]\n");
return sb.toString();
}
}
}
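The new LbsDataSubRecord is the heart of the 45778 fix: for ftLbsData (0x0013) the ushort that follows the sid is not the data size, so createSubRecord now hands it through as unknownShort1 and the record derives its real length from the optional link-formula block plus seven trailing shorts. Applying the rules encoded in getDataSize() above to illustrative values (not taken from a real workbook), the size works out as:

/** Illustrative ftLbsData size arithmetic, mirroring LbsDataSubRecord.getDataSize() above. */
public final class LbsDataSizeSketch {
    /**
     * @param linkPtgSize size of the encoded link Ptg, or 0 if there is no link formula
     * @param hasTrailingByte whether the optional byte after the formula is present
     */
    static int dataSize(int linkPtgSize, boolean hasTrailingByte) {
        int result = 2;                // the linkSize ushort, always present
        if (linkPtgSize > 0) {
            result += 2;               // encoded Ptg size field
            result += 4;               // unknown int
            result += linkPtgSize;     // the Ptg itself
            if (hasTrailingByte) {
                result += 1;
            }
        }
        result += 7 * 2;               // entry count, selected index, style, combo style, line count, two unknown shorts
        return result;
    }

    public static void main(String[] args) {
        System.out.println(dataSize(5, true));  // a 5-byte RefPtg link plus trailing byte: 2 + 12 + 14 = 28
        System.out.println(dataSize(0, false)); // no link formula at all: 2 + 14 = 16
    }
}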

View File

@ -21,7 +21,6 @@ import org.apache.poi.hssf.record.RecordInputStream;
import org.apache.poi.hssf.record.UnicodeString;
import org.apache.poi.hssf.record.constant.ConstantValueParser;
import org.apache.poi.hssf.record.constant.ErrorConstant;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.util.LittleEndian;
/**
@ -51,7 +50,7 @@ public final class ArrayPtg extends Ptg {
private final byte[] field_1_reserved;
// data from these fields comes after the Ptg data of all tokens in current formula
private short token_1_columns;
private int token_1_columns;
private short token_2_rows;
private Object[] token_3_arrayValues;
@ -110,7 +109,7 @@ public final class ArrayPtg extends Ptg {
* See page 304-305 of Excel97-2007BinaryFileFormat(xls)Specification.pdf
*/
public void readTokenValues(RecordInputStream in) {
short nColumns = in.readUByte();
int nColumns = in.readUByte();
short nRows = in.readShort();
//The token_1_columns and token_2_rows do not follow the documentation.
//The number of physical rows and columns is actually +1 of these values.
@ -173,7 +172,7 @@ public final class ArrayPtg extends Ptg {
}
public short getColumnCount() {
return token_1_columns;
return (short)token_1_columns;
}
/** This size includes the size of the array Ptg plus the Array Ptg Token value size*/

View File

@ -21,6 +21,8 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.poi.hssf.record.RecordInputStream;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndianOutput;
/**
* <tt>Ptg</tt> represents a syntactic token in a formula. 'PTG' is an acronym for
@ -264,7 +266,7 @@ public abstract class Ptg implements Cloneable {
*/
// public abstract int getDataSize();
public final byte [] getBytes()
public final byte[] getBytes()
{
int size = getSize();
byte[] bytes = new byte[ size ];
@ -275,6 +277,10 @@ public abstract class Ptg implements Cloneable {
/** write this Ptg to a byte array*/
public abstract void writeBytes(byte [] array, int offset);
public void write(LittleEndianOutput out) {
out.write(getBytes()); // TODO - optimise - just a hack for the moment
}
/**
* return a string representation of this token alone
*/
@ -284,14 +290,13 @@ public abstract class Ptg implements Cloneable {
*/
public final String toDebugString() {
byte[] ba = new byte[getSize()];
String retval=null;
writeBytes(ba,0);
try {
retval = org.apache.poi.util.HexDump.dump(ba,0,0);
return HexDump.dump(ba,0,0);
} catch (Exception e) {
e.printStackTrace();
}
return retval;
return null;
}
/** Overridden toString method to ensure object hash is not printed.

View File

@ -0,0 +1,35 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.util;
/**
*
* @author Josh Micich
*/
public interface LittleEndianInput {
byte readByte();
int readUByte();
short readShort();
int readUShort();
int readInt();
long readLong();
double readDouble();
void readFully(byte[] buf);
void readFully(byte[] buf, int off, int len);
String readUnicodeLEString(int nChars);
String readCompressedUnicode(int nChars);
}

View File

@ -0,0 +1,164 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.util;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
/**
*
* @author Josh Micich
*/
public class LittleEndianInputStream extends FilterInputStream implements LittleEndianInput {
public LittleEndianInputStream(InputStream is) {
super(is);
}
public byte readByte() {
return (byte)readUByte();
}
public int readUByte() {
int ch;
try {
ch = in.read();
} catch (IOException e) {
throw new RuntimeException(e);
}
checkEOF(ch);
return ch;
}
public double readDouble() {
return Double.longBitsToDouble(readLong());
}
public int readInt() {
int ch1;
int ch2;
int ch3;
int ch4;
try {
ch1 = in.read();
ch2 = in.read();
ch3 = in.read();
ch4 = in.read();
} catch (IOException e) {
throw new RuntimeException(e);
}
checkEOF(ch1 | ch2 | ch3 | ch4);
return (ch4 << 24) + (ch3 << 16) + (ch2 << 8) + (ch1 << 0);
}
public long readLong() {
int b0;
int b1;
int b2;
int b3;
int b4;
int b5;
int b6;
int b7;
try {
b0 = in.read();
b1 = in.read();
b2 = in.read();
b3 = in.read();
b4 = in.read();
b5 = in.read();
b6 = in.read();
b7 = in.read();
} catch (IOException e) {
throw new RuntimeException(e);
}
checkEOF(b0 | b1 | b2 | b3 | b4 | b5 | b6 | b7);
return (((long)b7 << 56) +
((long)b6 << 48) +
((long)b5 << 40) +
((long)b4 << 32) +
((long)b3 << 24) +
(b2 << 16) +
(b1 << 8) +
(b0 << 0));
}
public short readShort() {
return (short)readUShort();
}
public int readUShort() {
int ch1;
int ch2;
try {
ch1 = in.read();
ch2 = in.read();
} catch (IOException e) {
throw new RuntimeException(e);
}
checkEOF(ch1 | ch2);
return (ch2 << 8) + (ch1 << 0);
}
private static void checkEOF(int value) {
if (value <0) {
throw new RuntimeException("Unexpected end-of-file");
}
}
public void readFully(byte[] buf) {
readFully(buf, 0, buf.length);
}
public void readFully(byte[] buf, int off, int len) {
int max = off+len;
for(int i=off; i<max; i++) {
int ch;
try {
ch = in.read();
} catch (IOException e) {
throw new RuntimeException(e);
}
checkEOF(ch);
buf[i] = (byte) ch;
}
}
public String readCompressedUnicode(int nChars) {
char[] buf = new char[nChars];
for (int i = 0; i < buf.length; i++) {
int ch;
try {
ch = in.read();
} catch (IOException e) {
throw new RuntimeException(e);
}
checkEOF(ch);
buf[i] = (char) ch;
}
return new String(buf);
}
public String readUnicodeLEString(int nChars) {
char[] buf = new char[nChars];
for (int i = 0; i < buf.length; i++) {
int ch;
try {
ch = in.read();
} catch (IOException e) {
throw new RuntimeException(e);
}
checkEOF(ch);
buf[i] = (char) ch;
}
return new String(buf);
}
}
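A quick sanity check of the new input stream above, wrapping a plain ByteArrayInputStream: bytes are consumed least-significant first, so the pair 0x34, 0x12 comes back as 0x1234.

import java.io.ByteArrayInputStream;
import org.apache.poi.util.LittleEndianInputStream;

/** Tiny usage example for the LittleEndianInputStream added above. */
public final class LittleEndianReadDemo {
    public static void main(String[] args) {
        byte[] data = { 0x34, 0x12, (byte) 0xFF, (byte) 0xFF, 0x41, 0x42 };
        LittleEndianInputStream in = new LittleEndianInputStream(new ByteArrayInputStream(data));
        System.out.println(Integer.toHexString(in.readUShort())); // 1234
        System.out.println(in.readShort());                       // -1 (0xFFFF as a signed short)
        System.out.println(in.readCompressedUnicode(2));          // AB (one byte per character)
    }
}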

View File

@ -0,0 +1,30 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.util;
/**
*
* @author Josh Micich
*/
public interface LittleEndianOutput {
void writeByte(int v);
void writeShort(int v);
void writeInt(int v);
void writeLong(long v);
void writeDouble(double v);
void write(byte[] data);
}

View File

@ -0,0 +1,83 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.util;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;
/**
*
* @author Josh Micich
*/
public final class LittleEndianOutputStream extends FilterOutputStream implements LittleEndianOutput {
public LittleEndianOutputStream(OutputStream out) {
super(out);
}
public void writeByte(int v) {
try {
out.write(v);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public void writeDouble(double v) {
writeLong(Double.doubleToLongBits(v));
}
public void writeInt(int v) {
int b3 = (v >>> 24) & 0xFF;
int b2 = (v >>> 16) & 0xFF;
int b1 = (v >>> 8) & 0xFF;
int b0 = (v >>> 0) & 0xFF;
try {
out.write(b0);
out.write(b1);
out.write(b2);
out.write(b3);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public void writeLong(long v) {
writeInt((int)(v >> 0));
writeInt((int)(v >> 32));
}
public void writeShort(int v) {
int b1 = (v >>> 8) & 0xFF;
int b0 = (v >>> 0) & 0xFF;
try {
out.write(b0);
out.write(b1);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public void write(byte[] b) {
// suppress IOException for interface method
try {
super.write(b);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
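And the output stream round-trips against the input stream: writeShort/writeInt emit the least-significant byte first, which a quick in-memory check like this one confirms.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.poi.util.LittleEndianInputStream;
import org.apache.poi.util.LittleEndianOutputStream;

/** Round-trip check pairing the two little-endian stream classes added in this commit. */
public final class LittleEndianRoundTrip {
    public static void main(String[] args) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        LittleEndianOutputStream out = new LittleEndianOutputStream(baos);
        out.writeShort(0x005D);   // e.g. the ObjRecord sid
        out.writeInt(0x11223344);
        out.writeDouble(1.5);

        byte[] encoded = baos.toByteArray();
        System.out.println(Integer.toHexString(encoded[2] & 0xFF)); // 44 - low byte of the int comes first

        LittleEndianInputStream in = new LittleEndianInputStream(new ByteArrayInputStream(encoded));
        System.out.println(Integer.toHexString(in.readUShort()));   // 5d
        System.out.println(Integer.toHexString(in.readInt()));      // 11223344
        System.out.println(in.readDouble());                        // 1.5
    }
}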

View File

@ -1,4 +1,3 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
@ -15,37 +14,34 @@
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.util;
import java.io.UnsupportedEncodingException;
import java.text.FieldPosition;
import java.text.NumberFormat;
/**
* Title: String Utility Description: Collection of string handling utilities
*
*
*@author Andrew C. Oliver
*@author Sergei Kozello (sergeikozello at mail.ru)
*@author Toshiaki Kamoshida (kamoshida.toshiaki at future dot co dot jp)
*@since May 10, 2002
*@version 1.0
/**
* Title: String Utility Description: Collection of string handling utilities
*
*
*@author Andrew C. Oliver
*@author Sergei Kozello (sergeikozello at mail.ru)
*@author Toshiaki Kamoshida (kamoshida.toshiaki at future dot co dot jp)
*/
public class StringUtil {
private final static String ENCODING = "ISO-8859-1";
/**
* Constructor for the StringUtil object
*/
private static final String ENCODING_ISO_8859_1 = "ISO-8859-1";
private StringUtil() {
// no instances of this class
}
/**
/**
* Given a byte array of 16-bit unicode characters in Little Endian
* format (most important byte last), return a Java String representation
* of it.
*
* { 0x16, 0x00 } -0x16
*
* of it.
*
* { 0x16, 0x00 } -0x16
*
* @param string the byte array to be converted
* @param offset the initial offset into the
* byte array. it is assumed that string[ offset ] and string[ offset +
@ -53,11 +49,11 @@ public class StringUtil {
* @param len the length of the final string
* @return the converted string
* @exception ArrayIndexOutOfBoundsException if offset is out of bounds for
* the byte array (i.e., is negative or is greater than or equal to
* string.length)
* the byte array (i.e., is negative or is greater than or equal to
* string.length)
* @exception IllegalArgumentException if len is too large (i.e.,
* there is not enough data in string to create a String of that
* length)
* there is not enough data in string to create a String of that
* length)
*/
public static String getFromUnicodeLE(
final byte[] string,
@ -74,44 +70,44 @@ public class StringUtil {
try {
return new String(string, offset, len * 2, "UTF-16LE");
} catch (UnsupportedEncodingException e) {
throw new InternalError(); /*unreachable*/
throw new RuntimeException(e);
}
}
/**
/**
* Given a byte array of 16-bit unicode characters in little endian
* format (most important byte last), return a Java String representation
* of it.
*
* { 0x16, 0x00 } -0x16
*
*@param string the byte array to be converted
*@return the converted string
* of it.
*
* { 0x16, 0x00 } -0x16
*
* @param string the byte array to be converted
* @return the converted string
*/
public static String getFromUnicodeLE(final byte[] string) {
if(string.length == 0) { return ""; }
return getFromUnicodeLE(string, 0, string.length / 2);
}
/**
/**
* Given a byte array of 16-bit unicode characters in big endian
* format (most important byte first), return a Java String representation
* of it.
*
* { 0x00, 0x16 } -0x16
*
*@param string the byte array to be converted
**@param offset the initial offset into the
* byte array. it is assumed that string[ offset ] and string[ offset +
* 1 ] contain the first 16-bit unicode character
*@param len the length of the final string
*@return the converted string
*@exception ArrayIndexOutOfBoundsException if offset is out of bounds for
* the byte array (i.e., is negative or is greater than or equal to
* string.length)
*@exception IllegalArgumentException if len is too large (i.e.,
* there is not enough data in string to create a String of that
* length)
* of it.
*
* { 0x00, 0x16 } -0x16
*
* @param string the byte array to be converted
* @param offset the initial offset into the
* byte array. it is assumed that string[ offset ] and string[ offset +
* 1 ] contain the first 16-bit unicode character
* @param len the length of the final string
* @return the converted string
* @exception ArrayIndexOutOfBoundsException if offset is out of bounds for
* the byte array (i.e., is negative or is greater than or equal to
* string.length)
* @exception IllegalArgumentException if len is too large (i.e.,
* there is not enough data in string to create a String of that
* length)
*/
public static String getFromUnicodeBE(
final byte[] string,
@ -127,34 +123,34 @@ public class StringUtil {
try {
return new String(string, offset, len * 2, "UTF-16BE");
} catch (UnsupportedEncodingException e) {
throw new InternalError(); /*unreachable*/
throw new RuntimeException(e);
}
}
/**
/**
* Given a byte array of 16-bit unicode characters in big endian
* format (most important byte first), return a Java String representation
* of it.
*
* { 0x00, 0x16 } -0x16
*
*@param string the byte array to be converted
*@return the converted string
*
* { 0x00, 0x16 } -0x16
*
* @param string the byte array to be converted
* @return the converted string
*/
public static String getFromUnicodeBE(final byte[] string) {
if(string.length == 0) { return ""; }
return getFromUnicodeBE(string, 0, string.length / 2);
}
/**
/**
* Read 8 bit data (in ISO-8859-1 codepage) into a (unicode) Java
* String and return.
* (In Excel terms, read compressed 8 bit unicode as a string)
*
* @param string byte array to read
* @param offset offset to read byte array
* @param len length to read byte array
* @return String generated String instance by reading byte array
*
* @param string byte array to read
* @param offset offset to read byte array
* @param len length to read byte array
* @return String generated String instance by reading byte array
*/
public static String getFromCompressedUnicode(
final byte[] string,
@ -162,83 +158,76 @@ public class StringUtil {
final int len) {
try {
int len_to_use = Math.min(len, string.length - offset);
return new String(string, offset, len_to_use, "ISO-8859-1");
return new String(string, offset, len_to_use, ENCODING_ISO_8859_1);
} catch (UnsupportedEncodingException e) {
throw new InternalError(); /* unreachable */
throw new RuntimeException(e);
}
}
/**
* Takes a unicode (java) string, and returns it as 8 bit data (in ISO-8859-1
/**
* Takes a unicode (java) string, and returns it as 8 bit data (in ISO-8859-1
* codepage).
* (In Excel terms, write compressed 8 bit unicode)
*
*@param input the String containing the data to be written
*@param output the byte array to which the data is to be written
*@param offset an offset into the byte array at which the data starts
* when written
*
* @param input the String containing the data to be written
* @param output the byte array to which the data is to be written
* @param offset an offset into the byte array at which the data starts
* when written
*/
public static void putCompressedUnicode(
final String input,
final byte[] output,
final int offset) {
public static void putCompressedUnicode(String input, byte[] output, int offset) {
byte[] bytes;
try {
byte[] bytes = input.getBytes("ISO-8859-1");
System.arraycopy(bytes, 0, output, offset, bytes.length);
bytes = input.getBytes(ENCODING_ISO_8859_1);
} catch (UnsupportedEncodingException e) {
throw new InternalError(); /*unreachable*/
throw new RuntimeException(e);
}
System.arraycopy(bytes, 0, output, offset, bytes.length);
}
public static void putCompressedUnicode(String input, LittleEndianOutput out) {
byte[] bytes;
try {
bytes = input.getBytes(ENCODING_ISO_8859_1);
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
out.write(bytes);
}
/**
* Takes a unicode string, and returns it as little endian (most
/**
* Takes a unicode string, and returns it as little endian (most
* important byte last) bytes in the supplied byte array.
* (In Excel terms, write uncompressed unicode)
*
*@param input the String containing the unicode data to be written
*@param output the byte array to hold the uncompressed unicode, should be twice the length of the String
*@param offset the offset to start writing into the byte array
*
* @param input the String containing the unicode data to be written
* @param output the byte array to hold the uncompressed unicode, should be twice the length of the String
* @param offset the offset to start writing into the byte array
*/
public static void putUnicodeLE(
final String input,
final byte[] output,
final int offset) {
public static void putUnicodeLE(String input, byte[] output, int offset) {
byte[] bytes;
try {
byte[] bytes = input.getBytes("UTF-16LE");
System.arraycopy(bytes, 0, output, offset, bytes.length);
bytes = input.getBytes("UTF-16LE");
} catch (UnsupportedEncodingException e) {
throw new InternalError(); /*unreachable*/
throw new RuntimeException(e);
}
System.arraycopy(bytes, 0, output, offset, bytes.length);
}
public static void putUnicodeLE(String input, LittleEndianOutput out) {
byte[] bytes;
try {
bytes = input.getBytes("UTF-16LE");
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
out.write(bytes);
}
/**
* Takes a unicode string, and returns it as big endian (most
* important byte first) bytes in the supplied byte array.
* (In Excel terms, write uncompressed unicode)
*
*@param input the String containing the unicode data to be written
*@param output the byte array to hold the uncompressed unicode, should be twice the length of the String
*@param offset the offset to start writing into the byte array
*/
public static void putUnicodeBE(
final String input,
final byte[] output,
final int offset) {
try {
byte[] bytes = input.getBytes("UTF-16BE");
System.arraycopy(bytes, 0, output, offset, bytes.length);
} catch (UnsupportedEncodingException e) {
throw new InternalError(); /*unreachable*/
}
}
/**
* Apply printf() like formatting to a string.
* Primarily used for logging.
*@param message the string with embedded formatting info
* eg. "This is a test %2.2"
*@param params array of values to format into the string
*@return The formatted string
/**
* Apply printf() like formatting to a string.
* Primarily used for logging.
* @param message the string with embedded formatting info
* eg. "This is a test %2.2"
* @param params array of values to format into the string
* @return The formatted string
*/
public static String format(String message, Object[] params) {
int currentParamNumber = 0;
@ -307,39 +296,43 @@ public class StringUtil {
return 1;
}
/**
* @return the encoding we want to use, currently hardcoded to ISO-8859-1
/**
* @return the encoding we want to use, currently hardcoded to ISO-8859-1
*/
public static String getPreferredEncoding() {
return ENCODING;
return ENCODING_ISO_8859_1;
}
/**
* check the parameter has multibyte character
*
* @param value string to check
* @return boolean result
* true:string has at least one multibyte character
*
* @param value string to check
* @return boolean result true:string has at least one multibyte character
*/
public static boolean hasMultibyte(String value){
if( value == null )return false;
for(int i = 0 ; i < value.length() ; i++ ){
char c = value.charAt(i);
if(c > 0xFF )return true;
}
return false;
public static boolean hasMultibyte(String value) {
if (value == null)
return false;
for (int i = 0; i < value.length(); i++) {
char c = value.charAt(i);
if (c > 0xFF) {
return true;
}
}
return false;
}
/**
* Checks to see if a given String needs to be represented as Unicode
* @param value
*
* @param value
* @return true if string needs Unicode to be represented.
*/
public static boolean isUnicodeString(final String value) {
try {
return !value.equals(new String(value.getBytes("ISO-8859-1"), "ISO-8859-1"));
} catch (UnsupportedEncodingException e) {
return true;
}
}
public static boolean isUnicodeString(final String value) {
try {
return !value.equals(new String(value.getBytes(ENCODING_ISO_8859_1),
ENCODING_ISO_8859_1));
} catch (UnsupportedEncodingException e) {
return true;
}
}
}
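The new putCompressedUnicode/putUnicodeLE overloads that target a LittleEndianOutput keep the two Excel string encodings distinct: a "compressed" string is one ISO-8859-1 byte per character, while an LE unicode string is two bytes per character with the low byte first, and hasMultibyte/isUnicodeString decide which form is needed. A JDK-only illustration of the difference:

/** Illustrates the two string encodings handled by StringUtil above, using only the JDK. */
public final class ExcelStringEncodingDemo {
    public static void main(String[] args) throws java.io.UnsupportedEncodingException {
        String s = "Ab";
        byte[] compressed = s.getBytes("ISO-8859-1"); // 0x41, 0x62             (1 byte per char)
        byte[] unicodeLE  = s.getBytes("UTF-16LE");   // 0x41, 0x00, 0x62, 0x00 (low byte first)
        System.out.println(compressed.length + " vs " + unicodeLE.length); // 2 vs 4

        System.out.println(needsUnicode("Ab"));       // false - fits in ISO-8859-1
        System.out.println(needsUnicode("Ab\u0444")); // true  - a char above 0xFF forces the LE form
    }

    /** Same test that StringUtil.hasMultibyte performs. */
    private static boolean needsUnicode(String value) {
        for (int i = 0; i < value.length(); i++) {
            if (value.charAt(i) > 0xFF) {
                return true;
            }
        }
        return false;
    }
}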

View File

@ -21,56 +21,54 @@ package org.apache.poi.hssf.record;
import junit.framework.TestCase;
/**
* Tests the serialization and deserialization of the CommonObjectDataSubRecord
* Tests the serialization and deserialization of the {@link CommonObjectDataSubRecord}
* class works correctly. Test data taken directly from a real
* Excel file.
*
* @author Glen Stampoultzis (glens at apache.org)
*/
public final class TestCommonObjectDataSubRecord extends TestCase {
byte[] data = new byte[] {
(byte)0x12,(byte)0x00,(byte)0x01,(byte)0x00,
(byte)0x01,(byte)0x00,(byte)0x11,(byte)0x60,
(byte)0x00,(byte)0x00,(byte)0x00,(byte)0x00,
(byte)0x00,(byte)0x0D,(byte)0x26,(byte)0x01,
(byte)0x00,(byte)0x00,
};
byte[] data = new byte[] {
(byte)0x12,(byte)0x00,(byte)0x01,(byte)0x00,
(byte)0x01,(byte)0x00,(byte)0x11,(byte)0x60,
(byte)0x00,(byte)0x00,(byte)0x00,(byte)0x00,
(byte)0x00,(byte)0x0D,(byte)0x26,(byte)0x01,
(byte)0x00,(byte)0x00,
};
public void testLoad() {
CommonObjectDataSubRecord record = new CommonObjectDataSubRecord(TestcaseRecordInputStream.create(0x15, data));
public void testLoad() {
CommonObjectDataSubRecord record = new CommonObjectDataSubRecord(TestcaseRecordInputStream.createWithFakeSid(data), data.length);
assertEquals( CommonObjectDataSubRecord.OBJECT_TYPE_LIST_BOX, record.getObjectType());
assertEquals( (short)1, record.getObjectId());
assertEquals( (short)1, record.getOption());
assertEquals( true , record.isLocked() );
assertEquals( false, record.isPrintable() );
assertEquals( false, record.isAutofill() );
assertEquals( false, record.isAutoline() );
assertEquals( (int)24593, record.getReserved1());
assertEquals( (int)218103808, record.getReserved2());
assertEquals( (int)294, record.getReserved3());
assertEquals( 22 , record.getRecordSize() );
}
assertEquals( CommonObjectDataSubRecord.OBJECT_TYPE_LIST_BOX, record.getObjectType());
assertEquals((short) 1, record.getObjectId());
assertEquals((short) 1, record.getOption());
assertEquals(true, record.isLocked());
assertEquals(false, record.isPrintable());
assertEquals(false, record.isAutofill());
assertEquals(false, record.isAutoline());
assertEquals(24593, record.getReserved1());
assertEquals(218103808, record.getReserved2());
assertEquals(294, record.getReserved3());
assertEquals(18, record.getDataSize());
}
public void testStore()
{
CommonObjectDataSubRecord record = new CommonObjectDataSubRecord();
public void testStore() {
CommonObjectDataSubRecord record = new CommonObjectDataSubRecord();
record.setObjectType( CommonObjectDataSubRecord.OBJECT_TYPE_LIST_BOX );
record.setObjectId( (short) 1);
record.setOption( (short) 1);
record.setLocked( true );
record.setPrintable( false );
record.setAutofill( false );
record.setAutoline( false );
record.setReserved1( (int) 24593);
record.setReserved2( (int) 218103808);
record.setReserved3( (int) 294);
record.setObjectType(CommonObjectDataSubRecord.OBJECT_TYPE_LIST_BOX);
record.setObjectId((short) 1);
record.setOption((short) 1);
record.setLocked(true);
record.setPrintable(false);
record.setAutofill(false);
record.setAutoline(false);
record.setReserved1(24593);
record.setReserved2(218103808);
record.setReserved3(294);
byte [] recordBytes = record.serialize();
assertEquals(recordBytes.length - 4, data.length);
for (int i = 0; i < data.length; i++)
assertEquals("At offset " + i, data[i], recordBytes[i+4]);
}
byte [] recordBytes = record.serialize();
assertEquals(recordBytes.length - 4, data.length);
for (int i = 0; i < data.length; i++)
assertEquals("At offset " + i, data[i], recordBytes[i+4]);
}
}

View File

@ -42,13 +42,13 @@ public final class TestEmbeddedObjectRefSubRecord extends TestCase {
RecordInputStream in = new RecordInputStream(new ByteArrayInputStream(src));
in.nextRecord();
EmbeddedObjectRefSubRecord record1 = new EmbeddedObjectRefSubRecord(in);
EmbeddedObjectRefSubRecord record1 = new EmbeddedObjectRefSubRecord(in, src.length-4);
byte[] ser = record1.serialize();
RecordInputStream in2 = new RecordInputStream(new ByteArrayInputStream(ser));
in2.nextRecord();
EmbeddedObjectRefSubRecord record2 = new EmbeddedObjectRefSubRecord(in2);
EmbeddedObjectRefSubRecord record2 = new EmbeddedObjectRefSubRecord(in2, ser.length-4);
assertTrue(Arrays.equals(src, ser));
assertEquals(record1.getOLEClassName(), record2.getOLEClassName());
@ -64,7 +64,7 @@ public final class TestEmbeddedObjectRefSubRecord extends TestCase {
byte[] ser = record1.serialize();
RecordInputStream in2 = new RecordInputStream(new ByteArrayInputStream(ser));
in2.nextRecord();
EmbeddedObjectRefSubRecord record2 = new EmbeddedObjectRefSubRecord(in2);
EmbeddedObjectRefSubRecord record2 = new EmbeddedObjectRefSubRecord(in2, ser.length-4);
assertEquals(record1.getOLEClassName(), record2.getOLEClassName());
assertEquals(record1.getStreamId(), record2.getStreamId());
@ -88,7 +88,7 @@ public final class TestEmbeddedObjectRefSubRecord extends TestCase {
RecordInputStream in = new RecordInputStream(new ByteArrayInputStream(data));
in.nextRecord();
EmbeddedObjectRefSubRecord rec = new EmbeddedObjectRefSubRecord(in);
EmbeddedObjectRefSubRecord rec = new EmbeddedObjectRefSubRecord(in, data.length-4);
byte[] ser2 = rec.serialize();
assertTrue(Arrays.equals(data, ser2));
@ -129,7 +129,7 @@ public final class TestEmbeddedObjectRefSubRecord extends TestCase {
private static void confirmRead(byte[] data, int i) {
RecordInputStream in = TestcaseRecordInputStream.create(EmbeddedObjectRefSubRecord.sid, data);
EmbeddedObjectRefSubRecord rec = new EmbeddedObjectRefSubRecord(in);
EmbeddedObjectRefSubRecord rec = new EmbeddedObjectRefSubRecord(in, data.length);
byte[] ser2 = rec.serialize();
byte[] d2 = (byte[]) data.clone(); // remove sid+len for compare
System.arraycopy(ser2, 4, d2, 0, d2.length);

View File

@ -28,14 +28,11 @@ import junit.framework.TestCase;
* @author Glen Stampoultzis (glens at apache.org)
*/
public final class TestEndSubRecord extends TestCase {
byte[] data = new byte[] {
};
private static final byte[] data = { };
public void testLoad() {
EndSubRecord record = new EndSubRecord(TestcaseRecordInputStream.create(0x00, data));
assertEquals( 4, record.getRecordSize() );
EndSubRecord record = new EndSubRecord(TestcaseRecordInputStream.create(0x00, data), 0);
assertEquals(0, record.getDataSize());
}
public void testStore()

View File

@ -38,16 +38,16 @@ public final class TestNoteStructureSubRecord extends TestCase {
public void testRead() {
NoteStructureSubRecord record = new NoteStructureSubRecord(TestcaseRecordInputStream.create(NoteStructureSubRecord.sid, data));
NoteStructureSubRecord record = new NoteStructureSubRecord(TestcaseRecordInputStream.create(NoteStructureSubRecord.sid, data), data.length);
assertEquals(NoteStructureSubRecord.sid, record.getSid());
assertEquals(data.length + 4, record.getRecordSize());
assertEquals(data.length, record.getDataSize());
}
public void testWrite() {
NoteStructureSubRecord record = new NoteStructureSubRecord();
assertEquals(NoteStructureSubRecord.sid, record.getSid());
assertEquals(data.length + 4, record.getRecordSize());
assertEquals(data.length, record.getDataSize());
byte [] ser = record.serialize();
assertEquals(ser.length - 4, data.length);
@ -62,7 +62,7 @@ public final class TestNoteStructureSubRecord extends TestCase {
NoteStructureSubRecord cloned = (NoteStructureSubRecord)record.clone();
byte[] cln = cloned.serialize();
assertEquals(record.getRecordSize(), cloned.getRecordSize());
assertEquals(record.getDataSize(), cloned.getDataSize());
assertTrue(Arrays.equals(src, cln));
}
}

View File

@ -41,19 +41,19 @@ public final class TestObjRecord extends TestCase {
private static final byte[] recdata = {
0x15, 0x00, 0x12, 0x00, 0x06, 0x00, 0x01, 0x00, 0x11, 0x60,
(byte)0xF4, 0x02, 0x41, 0x01, 0x14, 0x10, 0x1F, 0x02, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00
0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
// TODO - this data seems to require two extra bytes padding. not sure where original file is.
// it's not bug 38607 attachment 17639
};
private static final byte[] recdataNeedingPadding = {
21, 0, 18, 0, 0, 0, 1, 0, 17, 96, 0, 0, 0, 0, 56, 111, -52, 3, 0, 0, 0, 0, 6, 0, 2, 0, 0, 0, 0, 0, 0, 0
21, 0, 18, 0, 0, 0, 1, 0, 17, 96, 0, 0, 0, 0, 56, 111, -52, 3, 0, 0, 0, 0, 6, 0, 2, 0, 0, 0, 0, 0, 0, 0
};
public void testLoad() {
ObjRecord record = new ObjRecord(TestcaseRecordInputStream.create(ObjRecord.sid, recdata));
assertEquals(28, record.getRecordSize() - 4);
assertEquals(26, record.getRecordSize() - 4);
List subrecords = record.getSubRecords();
assertEquals(2, subrecords.size() );
@ -66,7 +66,7 @@ public final class TestObjRecord extends TestCase {
ObjRecord record = new ObjRecord(TestcaseRecordInputStream.create(ObjRecord.sid, recdata));
byte [] recordBytes = record.serialize();
assertEquals(28, recordBytes.length - 4);
assertEquals(26, recordBytes.length - 4);
byte[] subData = new byte[recdata.length];
System.arraycopy(recordBytes, 4, subData, 0, subData.length);
assertTrue(Arrays.equals(recdata, subData));
@ -102,7 +102,7 @@ public final class TestObjRecord extends TestCase {
ObjRecord record = new ObjRecord(TestcaseRecordInputStream.create(ObjRecord.sid, recdataNeedingPadding));
if (record.getRecordSize() == 34) {
throw new AssertionFailedError("Identified bug 45133");
throw new AssertionFailedError("Identified bug 45133");
}
assertEquals(36, record.getRecordSize());

View File

@ -19,81 +19,94 @@
package org.apache.poi.hssf.record;
import java.util.Arrays;
import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
import org.apache.poi.util.HexRead;
/**
* Tests Subrecord components of an OBJ record. Test data taken directly
* from a real Excel file.
*
* @author Michael Zalewski (zalewski@optonline.net)
*/
public class TestSubRecord
extends TestCase
{
/*
The following is a dump of the OBJ record corresponding to an auto-filter
drop-down list. The 3rd subrecord beginning at offset 0x002e (type=0x0013)
does not conform to the documentation, because the length field is 0x1fee,
which is longer than the entire OBJ record.
public final class TestSubRecord extends TestCase {
/*
The following is a dump of the OBJ record corresponding to an auto-filter
drop-down list. The 3rd subrecord beginning at offset 0x002e (type=0x0013)
does not conform to the documentation, because the length field is 0x1fee,
which is longer than the entire OBJ record.
   Type=0x15 Len=0x0012 ftCmo      (offset 0x0000)
   15 00 12 00 14 00 01 00 01 21 00 00 00 00 3C 13 F4 03 00 00 00 00

   Type=0x0C Len=0x0014 ftSbs      (offset 0x0016)
   0C 00 14 00 00 00 00 00 00 00 00 00 00 00 01 00 08 00 00 00 10 00 00 00

   Type=0x13 Len=0x1FEE ftLbsData  (offset 0x002E)
   13 00 EE 1F 00 00 08 00 08 00 01 03 00 00 0A 00 14 00 6C 00

   Type=0x00 Len=0x0000 ftEnd      (offset 0x0042)
   00 00 00 00
*/
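// Hypothetical sketch (illustrative only, not part of this change) of the naive
// framing the dump above implies: each sub-record is a 2-byte type id, a 2-byte
// length, then that many payload bytes, terminated by ftEnd (type 0x0000).
// Trusting the declared length is exactly what fails for ftLbsData, whose length
// field claims 0x1FEE bytes that are not actually present.
private static void walkSubRecordsNaively(byte[] objData) {
    int pos = 0;
    while (pos + 4 <= objData.length) {
        int sid = (objData[pos] & 0xFF) | ((objData[pos + 1] & 0xFF) << 8);
        int len = (objData[pos + 2] & 0xFF) | ((objData[pos + 3] & 0xFF) << 8);
        pos += 4 + len; // overruns the record when len is bogus (e.g. 0x1FEE)
        if (sid == 0x0000) {
            break; // ftEnd - no more sub-records
        }
    }
}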
byte[] dataAutoFilter = new byte[]{
// ftCmo
(byte) 0x15, (byte) 0x00, (byte) 0x12, (byte) 0x00, (byte) 0x14, (byte) 0x00, (byte) 0x01, (byte) 0x00
, (byte) 0x01, (byte) 0x00, (byte) 0x01, (byte) 0x21, (byte) 0x00, (byte) 0x00, (byte) 0x3c, (byte) 0x13
, (byte) 0xf4, (byte) 0x03, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00
private static final byte[] dataAutoFilter
= HexRead.readFromString(""
+ "5D 00 46 00 " // ObjRecord.sid, size=70
// ftCmo
+ "15 00 12 00 "
+ "14 00 01 00 01 00 01 21 00 00 3C 13 F4 03 00 00 00 00 "
// ftSbs (currently UnknownSubrecord)
+ "0C 00 14 00 "
+ "00 00 00 00 00 00 00 00 00 00 01 00 08 00 00 00 10 00 00 00 "
// ftLbsData (currently UnknownSubrecord)
+ "13 00 EE 1F 00 00 "
+ "08 00 08 00 01 03 00 00 0A 00 14 00 6C 00 "
// ftEnd
+ "00 00 00 00"
);
// ftSbs (currently UnknownSubrecord)
, (byte) 0x0c, (byte) 0x00
, (byte) 0x14, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00
, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x01, (byte) 0x00, (byte) 0x08, (byte) 0x00
, (byte) 0x00, (byte) 0x00, (byte) 0x10, (byte) 0x00, (byte) 0x00, (byte) 0x00
// ftLbsData (currently UnknownSubrecord)
, (byte) 0x13, (byte) 0x00
, (byte) 0xee, (byte) 0x1f, (byte) 0x00, (byte) 0x00, (byte) 0x08, (byte) 0x00, (byte) 0x08, (byte) 0x00
, (byte) 0x01, (byte) 0x03, (byte) 0x00, (byte) 0x00, (byte) 0x0a, (byte) 0x00, (byte) 0x14, (byte) 0x00
, (byte) 0x6c, (byte) 0x00
// ftEnd
, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00
};
public TestSubRecord( String name )
{
super( name );
}
public void testParseCmo()
{
//jmh Record r = SubRecord.createSubRecord( (short) 0x0015, (short) 0x0012, dataAutoFilter, 0x0000 );
//jmh assertEquals( "ftCmo is 22 bytes", 22, r.getRecordSize() );
//jmh assertEquals( "ftCmo is a CommonObjectDataSubRecord"
//jmh , "org.apache.poi.hssf.record.CommonObjectDataSubRecord"
//jmh , r.getClass().getName() );
}
public void testParseAutoFilterLbsData()
{
//jmh Record r = SubRecord.createSubRecord( (short) 0x0013, (short) 0x1fee, dataAutoFilter, 0x0032 );
//jmh assertEquals( "ftLbsData is 20 bytes", 20, r.getRecordSize() );
}
public void testParseEnd()
{
//jmh Record r = SubRecord.createSubRecord( (short) 0x0000, (short) 0x0000, dataAutoFilter, 0x0046 );
//jmh assertEquals( "ftEnd is 4 bytes", 4, r.getRecordSize() );
//jmh assertEquals( "ftEnd is a EndSubRecord"
//jmh , "org.apache.poi.hssf.record.EndSubRecord"
//jmh , r.getClass().getName() );
}
/**
* Make sure that ftLbsData (which has abnormal size info) is parsed correctly.
* If the size field is interpreted incorrectly, the resulting ObjRecord becomes way too big.
* At the time of fixing (Oct-2008 svn r707447) {@link RecordInputStream} allowed buffer
* read overruns, so the bug was mostly silent.
*/
public void testReadAll_bug45778() {
RecordInputStream in = TestcaseRecordInputStream.create(dataAutoFilter);
ObjRecord or = new ObjRecord(in);
byte[] data2 = or.serialize();
if (data2.length == 8228) {
throw new AssertionFailedError("Identified bug 45778");
}
assertEquals(74, data2.length);
assertTrue(Arrays.equals(dataAutoFilter, data2));
}
public void testReadManualComboWithFormula() {
byte[] data = HexRead.readFromString(""
+ "5D 00 66 00 "
+ "15 00 12 00 14 00 02 00 11 20 00 00 00 00 "
+ "20 44 C6 04 00 00 00 00 0C 00 14 00 04 F0 C6 04 "
+ "00 00 00 00 00 00 01 00 06 00 00 00 10 00 00 00 "
+ "0E 00 0C 00 05 00 80 44 C6 04 24 09 00 02 00 02 "
+ "13 00 DE 1F 10 00 09 00 80 44 C6 04 25 0A 00 0F "
+ "00 02 00 02 00 02 06 00 03 00 08 00 00 00 00 00 "
+ "08 00 00 00 00 00 00 00 " // TODO sometimes last byte is non-zero
);
RecordInputStream in = TestcaseRecordInputStream.create(data);
ObjRecord or = new ObjRecord(in);
byte[] data2 = or.serialize();
if (data2.length == 8228) {
throw new AssertionFailedError("Identified bug XXXXX");
}
assertEquals("Encoded length", data.length, data2.length);
for (int i = 0; i < data.length; i++) {
if (data[i] != data2[i]) {
throw new AssertionFailedError("Encoded data differs at index " + i);
}
}
assertTrue(Arrays.equals(data, data2));
}
}

View File

@ -0,0 +1,53 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import junit.framework.TestCase;
/**
* Class to test {@link LittleEndianInputStream} and {@link LittleEndianOutputStream}
*
* @author Josh Micich
*/
public final class TestLittleEndianStreams extends TestCase {
public void testRead() {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
LittleEndianOutput leo = new LittleEndianOutputStream(baos);
leo.writeInt(12345678);
leo.writeShort(12345);
leo.writeByte(123);
leo.writeShort(40000);
leo.writeByte(200);
leo.writeLong(1234567890123456789L);
leo.writeDouble(123.456);
LittleEndianInput lei = new LittleEndianInputStream(new ByteArrayInputStream(baos.toByteArray()));
assertEquals(12345678, lei.readInt());
assertEquals(12345, lei.readShort());
assertEquals(123, lei.readByte());
assertEquals(40000, lei.readUShort());
assertEquals(200, lei.readUByte());
assertEquals(1234567890123456789L, lei.readLong());
assertEquals(123.456, lei.readDouble(), 0.0);
}
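// Hypothetical follow-up sketch (illustrative only, not part of this change):
// 40000 does not fit in a signed short, so the value above only survives the
// round trip because it is read back with readUShort(); readShort() would
// return the same two bytes reinterpreted as a negative number.
public void testUnsignedShortRoundTripSketch() {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    LittleEndianOutput leo = new LittleEndianOutputStream(baos);
    leo.writeShort(40000);
    LittleEndianInput lei = new LittleEndianInputStream(new ByteArrayInputStream(baos.toByteArray()));
    assertEquals(40000, lei.readUShort());
}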
}