Merged revisions 709570,709598,710114,710134,710136,711505,711513-711515,711694,711739,711741,711746,711749 via svnmerge from

https://svn.apache.org/repos/asf/poi/trunk

........
  r709570 | josh | 2008-10-31 14:17:08 -0700 (Fri, 31 Oct 2008) | 1 line
  
  made DrawingSelectionRecord into a plain BIFF record (not an escher holder aggregate). Added some interpretation of fields
........
  r709598 | josh | 2008-10-31 16:24:41 -0700 (Fri, 31 Oct 2008) | 1 line
  
  Simplified multiple record expansion logic
........
  r710114 | yegor | 2008-11-03 09:54:01 -0800 (Mon, 03 Nov 2008) | 1 line
  
  fixed #46122: Picture#getEscherBSERecord threw NullPointerException if EscherContainerRecord.BSTORE_CONTAINER was not found
........
  r710134 | yegor | 2008-11-03 11:19:39 -0800 (Mon, 03 Nov 2008) | 1 line
  
  fixed bug #46033: table cells had incorrect text type resulting in corrupted style info
........
  r710136 | yegor | 2008-11-03 11:23:52 -0800 (Mon, 03 Nov 2008) | 1 line
  
  updated status of the fixed bug #46033
........
  r711505 | josh | 2008-11-04 19:50:31 -0800 (Tue, 04 Nov 2008) | 1 line
  
  Refactored test case
........
  r711513 | josh | 2008-11-04 21:45:17 -0800 (Tue, 04 Nov 2008) | 1 line
  
  Converted ConstantValueParser to use plain Strings instead of UnicodeStrings
........
  r711514 | josh | 2008-11-04 21:52:35 -0800 (Tue, 04 Nov 2008) | 1 line
  
  Converted SupBookRecord to use plain Strings instead of UnicodeStrings
........
  r711515 | josh | 2008-11-04 22:15:59 -0800 (Tue, 04 Nov 2008) | 1 line
  
  Refactored test case
........
  r711694 | josh | 2008-11-05 12:46:00 -0800 (Wed, 05 Nov 2008) | 1 line
  
  Fixed bug in conversion to/from text cells
........
  r711739 | josh | 2008-11-05 15:28:55 -0800 (Wed, 05 Nov 2008) | 1 line
  
  Refactoring test case
........
  r711741 | josh | 2008-11-05 15:35:02 -0800 (Wed, 05 Nov 2008) | 1 line
  
  Refactoring test case
........
  r711746 | josh | 2008-11-05 15:45:42 -0800 (Wed, 05 Nov 2008) | 1 line
  
  Fixed mistake in test case.  Constant value was 4 bytes too large (should be max *data* size not max *record* size).
........
  r711749 | josh | 2008-11-05 17:12:41 -0800 (Wed, 05 Nov 2008) | 1 line
  
  Introduced ContinuableRecord to help fix serialization of StringRecords with large data.  Fixed TextObjectRecord to only write 16bit unicode when needed.  Simplification in UnicodeString.
........


git-svn-id: https://svn.apache.org/repos/asf/poi/branches/ooxml@711755 13f79535-47bb-0310-9956-ffa450edef68
Josh Micich 2008-11-06 02:38:06 +00:00
parent 99b3eba292
commit 178c3de886
37 changed files with 1393 additions and 1355 deletions

File: changes.xml

@@ -37,6 +37,8 @@
 <!-- Don't forget to update status.xml too! -->
 <release version="3.5-beta4" date="2008-??-??">
+<action dev="POI-DEVELOPERS" type="fix">46033 - fixed TableCell to correctly set text type</action>
+<action dev="POI-DEVELOPERS" type="fix">46122 - fixed Picture.draw to skip rendering if picture data was not found</action>
 <action dev="POI-DEVELOPERS" type="fix">15716 - memory usage optimisation - converted Ptg arrays into Formula objects</action>
 <action dev="POI-DEVELOPERS" type="add">46065 - added implementation for VALUE function</action>
 <action dev="POI-DEVELOPERS" type="add">45966 - added implementation for FIND function</action>

File: status.xml

@@ -34,6 +34,8 @@
 <!-- Don't forget to update changes.xml too! -->
 <changes>
 <release version="3.5-beta4" date="2008-??-??">
+<action dev="POI-DEVELOPERS" type="fix">46033 - fixed TableCell to correctly set text type</action>
+<action dev="POI-DEVELOPERS" type="fix">46122 - fixed Picture.draw to skip rendering if picture data was not found</action>
 <action dev="POI-DEVELOPERS" type="fix">15716 - memory usage optimisation - converted Ptg arrays into Formula objects</action>
 <action dev="POI-DEVELOPERS" type="add">46065 - added implementation for VALUE function</action>
 <action dev="POI-DEVELOPERS" type="add">45966 - added implementation for FIND function</action>

File: LinkTable.java

@ -29,8 +29,9 @@ import org.apache.poi.hssf.record.ExternalNameRecord;
import org.apache.poi.hssf.record.NameRecord; import org.apache.poi.hssf.record.NameRecord;
import org.apache.poi.hssf.record.Record; import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.SupBookRecord; import org.apache.poi.hssf.record.SupBookRecord;
import org.apache.poi.hssf.record.UnicodeString; import org.apache.poi.hssf.record.formula.Area3DPtg;
import org.apache.poi.hssf.record.formula.NameXPtg; import org.apache.poi.hssf.record.formula.NameXPtg;
import org.apache.poi.hssf.record.formula.Ref3DPtg;
/** /**
* Link Table (OOO pdf reference: 4.10.3 ) <p/> * Link Table (OOO pdf reference: 4.10.3 ) <p/>
@ -311,10 +312,10 @@ final class LinkTable {
return null; return null;
} }
int shIx = _externSheetRecord.getFirstSheetIndexFromRefIndex(extRefIndex); int shIx = _externSheetRecord.getFirstSheetIndexFromRefIndex(extRefIndex);
UnicodeString usSheetName = ebr.getSheetNames()[shIx]; String usSheetName = ebr.getSheetNames()[shIx];
return new String[] { return new String[] {
ebr.getURL(), ebr.getURL(),
usSheetName.getString(), usSheetName,
}; };
} }
@ -345,9 +346,9 @@ final class LinkTable {
return result; return result;
} }
private static int getSheetIndex(UnicodeString[] sheetNames, String sheetName) { private static int getSheetIndex(String[] sheetNames, String sheetName) {
for (int i = 0; i < sheetNames.length; i++) { for (int i = 0; i < sheetNames.length; i++) {
if (sheetNames[i].getString().equals(sheetName)) { if (sheetNames[i].equals(sheetName)) {
return i; return i;
} }

File: DrawingSelectionRecord.java

@ -17,25 +17,129 @@
package org.apache.poi.hssf.record; package org.apache.poi.hssf.record;
public final class DrawingSelectionRecord extends AbstractEscherHolderRecord { import org.apache.poi.util.HexDump;
public static final short sid = 0xED; import org.apache.poi.util.LittleEndianByteArrayOutputStream;
import org.apache.poi.util.LittleEndianInput;
import org.apache.poi.util.LittleEndianOutput;
public DrawingSelectionRecord() /**
{ * MsoDrawingSelection (0x00ED)<p/>
} * Reference:
* [MS-OGRAPH].pdf sec 2.4.69
*
* @author Josh Micich
*/
public final class DrawingSelectionRecord extends Record {
public static final short sid = 0x00ED;
public DrawingSelectionRecord( RecordInputStream in ) /**
{ * From [MS-ODRAW].pdf sec 2.2.1<br/>
super( in ); * TODO - make EscherRecordHeader {@link LittleEndianInput} aware and refactor with this
} */
private static final class OfficeArtRecordHeader {
public static final int ENCODED_SIZE = 8;
/**
* lower 4 bits is 'version' usually 0x01 or 0x0F (for containers)<br/>
* upper 12 bits is 'instance'
*/
private final int _verAndInstance;
/** value should be between 0xF000 and 0xFFFF */
private final int _type;
private final int _length;
protected String getRecordName() public OfficeArtRecordHeader(LittleEndianInput in) {
{ _verAndInstance = in.readUShort();
return "MSODRAWINGSELECTION"; _type = in.readUShort();
} _length = in.readInt();
}
public short getSid() public void serialize(LittleEndianOutput out) {
{ out.writeShort(_verAndInstance);
return sid; out.writeShort(_type);
} out.writeInt(_length);
}
public String debugFormatAsString() {
StringBuffer sb = new StringBuffer(32);
sb.append("ver+inst=").append(HexDump.shortToHex(_verAndInstance));
sb.append(" type=").append(HexDump.shortToHex(_type));
sb.append(" len=").append(HexDump.intToHex(_length));
return sb.toString();
}
}
// [MS-OGRAPH].pdf says that the data of this record is an OfficeArtFDGSL structure
// as described in[MS-ODRAW].pdf sec 2.2.33
private OfficeArtRecordHeader _header;
private int _cpsp;
/** a MSODGSLK enum value for the current selection mode */
private int _dgslk;
private int _spidFocus;
/** selected shape IDs (e.g. from EscherSpRecord.ShapeId) */
private int[] _shapeIds;
public DrawingSelectionRecord(RecordInputStream in) {
_header = new OfficeArtRecordHeader(in);
_cpsp = in.readInt();
_dgslk = in.readInt();
_spidFocus = in.readInt();
int nShapes = in.available() / 4;
int[] shapeIds = new int[nShapes];
for (int i = 0; i < nShapes; i++) {
shapeIds[i] = in.readInt();
}
_shapeIds = shapeIds;
}
public short getSid() {
return sid;
}
protected int getDataSize() {
return OfficeArtRecordHeader.ENCODED_SIZE
+ 12 // 3 int fields
+ _shapeIds.length * 4;
}
public int serialize(int offset, byte[] data) {
int dataSize = getDataSize();
int recSize = 4 + dataSize;
LittleEndianOutput out = new LittleEndianByteArrayOutputStream(data, offset, recSize);
out.writeShort(sid);
out.writeShort(dataSize);
_header.serialize(out);
out.writeInt(_cpsp);
out.writeInt(_dgslk);
out.writeInt(_spidFocus);
for (int i = 0; i < _shapeIds.length; i++) {
out.writeInt(_shapeIds[i]);
}
return recSize;
}
public Object clone() {
// currently immutable
return this;
}
public String toString() {
StringBuffer sb = new StringBuffer();
sb.append("[MSODRAWINGSELECTION]\n");
sb.append(" .rh =(").append(_header.debugFormatAsString()).append(")\n");
sb.append(" .cpsp =").append(HexDump.intToHex(_cpsp)).append('\n');
sb.append(" .dgslk =").append(HexDump.intToHex(_dgslk)).append('\n');
sb.append(" .spidFocus=").append(HexDump.intToHex(_spidFocus)).append('\n');
sb.append(" .shapeIds =(");
for (int i = 0; i < _shapeIds.length; i++) {
if (i > 0) {
sb.append(", ");
}
sb.append(HexDump.intToHex(_shapeIds[i]));
}
sb.append(")\n");
sb.append("[/MSODRAWINGSELECTION]\n");
return sb.toString();
}
} }
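Note (illustration only, not part of this commit): after the rewrite the record follows the ordinary BIFF Record contract, so it can be round-tripped like any other record. A minimal sketch, assuming an instance that was read from a workbook stream elsewhere:

```java
import org.apache.poi.hssf.record.DrawingSelectionRecord;

public class DrawingSelectionDump {
    // Serializes a DrawingSelectionRecord back to bytes via the plain BIFF contract:
    // getRecordSize() is the 4-byte header plus getDataSize(), and serialize()
    // writes sid, data length and payload at the given offset.
    static byte[] toBytes(DrawingSelectionRecord rec) {
        byte[] buf = new byte[rec.getRecordSize()];
        int written = rec.serialize(0, buf);
        if (written != buf.length) {
            throw new IllegalStateException("unexpected serialized size: " + written);
        }
        return buf;
    }
}
```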

File: RecordFactory.java

@ -49,7 +49,7 @@ public final class RecordFactory {
* contains the classes for all the records we want to parse.<br/> * contains the classes for all the records we want to parse.<br/>
* Note - this most but not *every* subclass of Record. * Note - this most but not *every* subclass of Record.
*/ */
private static final Class[] records = { private static final Class[] recordClasses = {
ArrayRecord.class, ArrayRecord.class,
BackupRecord.class, BackupRecord.class,
BlankRecord.class, BlankRecord.class,
@ -163,7 +163,7 @@ public final class RecordFactory {
/** /**
* cache of the recordsToMap(); * cache of the recordsToMap();
*/ */
private static Map recordsMap = recordsToMap(records); private static Map recordsMap = recordsToMap(recordClasses);
private static short[] _allKnownRecordSIDs; private static short[] _allKnownRecordSIDs;
@ -172,16 +172,33 @@ public final class RecordFactory {
* are returned digested into the non-mul form. * are returned digested into the non-mul form.
*/ */
public static Record [] createRecord(RecordInputStream in) { public static Record [] createRecord(RecordInputStream in) {
Record record = createSingleRecord(in);
if (record instanceof DBCellRecord) {
// Not needed by POI. Regenerated from scratch by POI when spreadsheet is written
return new Record[] { null, };
}
if (record instanceof RKRecord) {
return new Record[] { convertToNumberRecord((RKRecord) record), };
}
if (record instanceof MulRKRecord) {
return convertRKRecords((MulRKRecord)record);
}
if (record instanceof MulBlankRecord) {
return convertMulBlankRecords((MulBlankRecord)record);
}
return new Record[] { record, };
}
private static Record createSingleRecord(RecordInputStream in) {
Constructor constructor = (Constructor) recordsMap.get(new Short(in.getSid())); Constructor constructor = (Constructor) recordsMap.get(new Short(in.getSid()));
if (constructor == null) { if (constructor == null) {
return new Record[] { new UnknownRecord(in), }; return new UnknownRecord(in);
} }
Record retval;
try { try {
retval = ( Record ) constructor.newInstance(new Object[] { in }); return (Record) constructor.newInstance(new Object[] { in });
} catch (InvocationTargetException e) { } catch (InvocationTargetException e) {
throw new RecordFormatException("Unable to construct record instance" , e.getTargetException()); throw new RecordFormatException("Unable to construct record instance" , e.getTargetException());
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
@ -191,54 +208,55 @@ public final class RecordFactory {
} catch (IllegalAccessException e) { } catch (IllegalAccessException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
}
if (retval instanceof RKRecord) {
// RK record is a slightly smaller alternative to NumberRecord
// POI likes NumberRecord better
RKRecord rk = ( RKRecord ) retval;
NumberRecord num = new NumberRecord();
num.setColumn(rk.getColumn()); /**
num.setRow(rk.getRow()); * RK record is a slightly smaller alternative to NumberRecord
num.setXFIndex(rk.getXFIndex()); * POI likes NumberRecord better
num.setValue(rk.getRKNumber()); */
return new Record[] { num, }; private static NumberRecord convertToNumberRecord(RKRecord rk) {
NumberRecord num = new NumberRecord();
num.setColumn(rk.getColumn());
num.setRow(rk.getRow());
num.setXFIndex(rk.getXFIndex());
num.setValue(rk.getRKNumber());
return num;
}
/**
* Converts a {@link MulRKRecord} into an equivalent array of {@link NumberRecord}s
*/
private static NumberRecord[] convertRKRecords(MulRKRecord mrk) {
NumberRecord[] mulRecs = new NumberRecord[mrk.getNumColumns()];
for (int k = 0; k < mrk.getNumColumns(); k++) {
NumberRecord nr = new NumberRecord();
nr.setColumn((short) (k + mrk.getFirstColumn()));
nr.setRow(mrk.getRow());
nr.setXFIndex(mrk.getXFAt(k));
nr.setValue(mrk.getRKNumberAt(k));
mulRecs[k] = nr;
} }
if (retval instanceof DBCellRecord) { return mulRecs;
// Not needed by POI. Regenerated from scratch by POI when spreadsheet is written }
return new Record[] { null, };
/**
* Converts a {@link MulBlankRecord} into an equivalent array of {@link BlankRecord}s
*/
private static BlankRecord[] convertMulBlankRecords(MulBlankRecord mb) {
BlankRecord[] mulRecs = new BlankRecord[mb.getNumColumns()];
for (int k = 0; k < mb.getNumColumns(); k++) {
BlankRecord br = new BlankRecord();
br.setColumn((short) (k + mb.getFirstColumn()));
br.setRow(mb.getRow());
br.setXFIndex(mb.getXFAt(k));
mulRecs[k] = br;
} }
// expand multiple records where necessary return mulRecs;
if (retval instanceof MulRKRecord) {
MulRKRecord mrk = ( MulRKRecord ) retval;
Record[] mulRecs = new Record[ mrk.getNumColumns() ];
for (int k = 0; k < mrk.getNumColumns(); k++) {
NumberRecord nr = new NumberRecord();
nr.setColumn(( short ) (k + mrk.getFirstColumn()));
nr.setRow(mrk.getRow());
nr.setXFIndex(mrk.getXFAt(k));
nr.setValue(mrk.getRKNumberAt(k));
mulRecs[ k ] = nr;
}
return mulRecs;
}
if (retval instanceof MulBlankRecord) {
MulBlankRecord mb = ( MulBlankRecord ) retval;
Record[] mulRecs = new Record[ mb.getNumColumns() ];
for (int k = 0; k < mb.getNumColumns(); k++) {
BlankRecord br = new BlankRecord();
br.setColumn(( short ) (k + mb.getFirstColumn()));
br.setRow(mb.getRow());
br.setXFIndex(mb.getXFAt(k));
mulRecs[ k ] = br;
}
return mulRecs;
}
return new Record[] { retval, };
} }
/** /**
@ -325,19 +343,26 @@ public final class RecordFactory {
// After EOF, Excel seems to pad block with zeros // After EOF, Excel seems to pad block with zeros
continue; continue;
} }
Record[] recs = createRecord(recStream); // handle MulRK records Record record = createSingleRecord(recStream);
if (recs.length > 1) { if (record instanceof DBCellRecord) {
for (int k = 0; k < recs.length; k++) { // Not needed by POI. Regenerated from scratch by POI when spreadsheet is written
records.add(recs[ k ]); // these will be number records
}
continue; continue;
} }
Record record = recs[ 0 ];
if (record == null) { if (record instanceof RKRecord) {
records.add(convertToNumberRecord((RKRecord) record));
continue; continue;
} }
if (record instanceof MulRKRecord) {
addAll(records, convertRKRecords((MulRKRecord)record));
continue;
}
if (record instanceof MulBlankRecord) {
addAll(records, convertMulBlankRecords((MulBlankRecord)record));
continue;
}
if (record.getSid() == DrawingGroupRecord.sid if (record.getSid() == DrawingGroupRecord.sid
&& lastRecord instanceof DrawingGroupRecord) { && lastRecord instanceof DrawingGroupRecord) {
DrawingGroupRecord lastDGRecord = (DrawingGroupRecord) lastRecord; DrawingGroupRecord lastDGRecord = (DrawingGroupRecord) lastRecord;
@ -354,8 +379,6 @@ public final class RecordFactory {
records.add(record); records.add(record);
} else if (lastRecord instanceof DrawingGroupRecord) { } else if (lastRecord instanceof DrawingGroupRecord) {
((DrawingGroupRecord)lastRecord).processContinueRecord(contRec.getData()); ((DrawingGroupRecord)lastRecord).processContinueRecord(contRec.getData());
} else if (lastRecord instanceof StringRecord) {
((StringRecord)lastRecord).processContinueRecord(contRec.getData());
} else if (lastRecord instanceof UnknownRecord) { } else if (lastRecord instanceof UnknownRecord) {
//Gracefully handle records that we don't know about, //Gracefully handle records that we don't know about,
//that happen to be continued //that happen to be continued
@ -373,4 +396,10 @@ public final class RecordFactory {
} }
return records; return records;
} }
private static void addAll(List destList, Record[] srcRecs) {
for (int i = 0; i < srcRecs.length; i++) {
destList.add(srcRecs[i]);
}
}
} }
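Note (illustration only): a hedged sketch of driving the factory after this refactoring. createRecords() is assumed to keep its existing behaviour; MulRK and MulBlank records come back already expanded into individual NumberRecord/BlankRecord instances by the helpers added above. The file name is made up.

```java
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Iterator;
import java.util.List;

import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.RecordFactory;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public class RecordDump {
    public static void main(String[] args) throws Exception {
        POIFSFileSystem fs = new POIFSFileSystem(new FileInputStream("example.xls"));
        InputStream wbStream = fs.createDocumentInputStream("Workbook");

        // No MulRKRecord/MulBlankRecord instances appear in this list; the factory
        // converts them before returning.
        List records = RecordFactory.createRecords(wbStream);
        for (Iterator it = records.iterator(); it.hasNext();) {
            Record r = (Record) it.next();
            System.out.println(r.getClass().getName());
        }
    }
}
```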

File: RecordInputStream.java

@ -320,16 +320,6 @@ public final class RecordInputStream extends InputStream implements LittleEndian
} }
} }
/** Returns an excel style unicode string from the bytes reminaing in the record.
* <i>Note:</i> Unicode strings differ from <b>normal</b> strings due to the addition of
* formatting information.
*
* @return The unicode string representation of the remaining bytes.
*/
public UnicodeString readUnicodeString() {
return new UnicodeString(this);
}
/** Returns the remaining bytes for the current record. /** Returns the remaining bytes for the current record.
* *
* @return The remaining bytes of the current record. * @return The remaining bytes of the current record.

File: SSTRecord.java

@ -17,14 +17,16 @@
package org.apache.poi.hssf.record; package org.apache.poi.hssf.record;
import java.util.Iterator;
import org.apache.poi.hssf.record.cont.ContinuableRecord;
import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.util.IntMapper; import org.apache.poi.util.IntMapper;
import org.apache.poi.util.LittleEndianConsts; import org.apache.poi.util.LittleEndianConsts;
import java.util.Iterator;
/** /**
* Title: Static String Table Record * Title: Static String Table Record (0x00FC)<p/>
* <P> *
* Description: This holds all the strings for LabelSSTRecords. * Description: This holds all the strings for LabelSSTRecords.
* <P> * <P>
* REFERENCE: PG 389 Microsoft Excel 97 Developer's Kit (ISBN: * REFERENCE: PG 389 Microsoft Excel 97 Developer's Kit (ISBN:
@ -37,27 +39,20 @@ import java.util.Iterator;
* @see org.apache.poi.hssf.record.LabelSSTRecord * @see org.apache.poi.hssf.record.LabelSSTRecord
* @see org.apache.poi.hssf.record.ContinueRecord * @see org.apache.poi.hssf.record.ContinueRecord
*/ */
public final class SSTRecord extends Record { public final class SSTRecord extends ContinuableRecord {
public static final short sid = 0x00FC; public static final short sid = 0x00FC;
private static UnicodeString EMPTY_STRING = new UnicodeString(""); private static final UnicodeString EMPTY_STRING = new UnicodeString("");
/** how big can an SST record be? As big as any record can be: 8228 bytes */
static final int MAX_RECORD_SIZE = 8228;
// TODO - move these constants to test class (the only consumer)
/** standard record overhead: two shorts (record id plus data space size)*/ /** standard record overhead: two shorts (record id plus data space size)*/
static final int STD_RECORD_OVERHEAD = static final int STD_RECORD_OVERHEAD = 2 * LittleEndianConsts.SHORT_SIZE;
2 * LittleEndianConsts.SHORT_SIZE;
/** SST overhead: the standard record overhead, plus the number of strings and the number of unique strings -- two ints */ /** SST overhead: the standard record overhead, plus the number of strings and the number of unique strings -- two ints */
static final int SST_RECORD_OVERHEAD = static final int SST_RECORD_OVERHEAD = STD_RECORD_OVERHEAD + 2 * LittleEndianConsts.INT_SIZE;
( STD_RECORD_OVERHEAD + ( 2 * LittleEndianConsts.INT_SIZE ) );
/** how much data can we stuff into an SST record? That would be _max minus the standard SST record overhead */ /** how much data can we stuff into an SST record? That would be _max minus the standard SST record overhead */
static final int MAX_DATA_SPACE = MAX_RECORD_SIZE - SST_RECORD_OVERHEAD; static final int MAX_DATA_SPACE = RecordInputStream.MAX_RECORD_DATA_SIZE - 8;
/** overhead for each string includes the string's character count (a short) and the flag describing its characteristics (a byte) */
static final int STRING_MINIMAL_OVERHEAD = LittleEndianConsts.SHORT_SIZE + LittleEndianConsts.BYTE_SIZE;
/** union of strings in the SST and EXTSST */ /** union of strings in the SST and EXTSST */
private int field_1_num_strings; private int field_1_num_strings;
@ -133,37 +128,6 @@ public final class SSTRecord extends Record {
return field_2_num_unique_strings; return field_2_num_unique_strings;
} }
/**
* USE THIS METHOD AT YOUR OWN PERIL: THE <code>addString</code>
* METHODS MANIPULATE THE NUMBER OF STRINGS AS A SIDE EFFECT; YOUR
* ATTEMPTS AT MANIPULATING THE STRING COUNT IS LIKELY TO BE VERY
* WRONG AND WILL RESULT IN BAD BEHAVIOR WHEN THIS RECORD IS
* WRITTEN OUT AND ANOTHER PROCESS ATTEMPTS TO READ THE RECORD
*
* @param count number of strings
*
*/
public void setNumStrings( final int count )
{
field_1_num_strings = count;
}
/**
* USE THIS METHOD AT YOUR OWN PERIL: THE <code>addString</code>
* METHODS MANIPULATE THE NUMBER OF UNIQUE STRINGS AS A SIDE
* EFFECT; YOUR ATTEMPTS AT MANIPULATING THE UNIQUE STRING COUNT
* IS LIKELY TO BE VERY WRONG AND WILL RESULT IN BAD BEHAVIOR WHEN
* THIS RECORD IS WRITTEN OUT AND ANOTHER PROCESS ATTEMPTS TO READ
* THE RECORD
*
* @param count number of strings
*/
public void setNumUniqueStrings( final int count )
{
field_2_num_unique_strings = count;
}
/** /**
* Get a particular string by its index * Get a particular string by its index
@ -178,11 +142,6 @@ public final class SSTRecord extends Record {
return (UnicodeString) field_3_strings.get( id ); return (UnicodeString) field_3_strings.get( id );
} }
public boolean isString16bit( final int id )
{
UnicodeString unicodeString = ( (UnicodeString) field_3_strings.get( id ) );
return ( ( unicodeString.getOptionFlags() & 0x01 ) == 1 );
}
/** /**
* Return a debugging string representation * Return a debugging string representation
@ -350,29 +309,11 @@ public final class SSTRecord extends Record {
return field_3_strings.size(); return field_3_strings.size();
} }
/** protected void serialize(ContinuableRecordOutput out) {
* called by the class that is responsible for writing this sucker. SSTSerializer serializer = new SSTSerializer(field_3_strings, getNumStrings(), getNumUniqueStrings() );
* Subclasses should implement this so that their data is passed back in a serializer.serialize(out);
* byte array.
*
* @return size
*/
public int serialize( int offset, byte[] data )
{
SSTSerializer serializer = new SSTSerializer(
field_3_strings, getNumStrings(), getNumUniqueStrings() );
int bytes = serializer.serialize( offset, data );
bucketAbsoluteOffsets = serializer.getBucketAbsoluteOffsets(); bucketAbsoluteOffsets = serializer.getBucketAbsoluteOffsets();
bucketRelativeOffsets = serializer.getBucketRelativeOffsets(); bucketRelativeOffsets = serializer.getBucketRelativeOffsets();
return bytes;
}
protected int getDataSize() {
SSTRecordSizeCalculator calculator = new SSTRecordSizeCalculator(field_3_strings);
int recordSize = calculator.getRecordSize();
return recordSize-4;
} }
SSTDeserializer getDeserializer() SSTDeserializer getDeserializer()

File: SSTRecordHeader.java (deleted)

@ -1,76 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianConsts;
/**
* Write out an SST header record.
*
* @author Glen Stampoultzis (glens at apache.org)
*/
class SSTRecordHeader
{
int numStrings;
int numUniqueStrings;
public SSTRecordHeader( int numStrings, int numUniqueStrings )
{
this.numStrings = numStrings;
this.numUniqueStrings = numUniqueStrings;
}
/**
* Writes out the SST record. This consists of the sid, the record size, the number of
* strings and the number of unique strings.
*
* @param data The data buffer to write the header to.
* @param bufferIndex The index into the data buffer where the header should be written.
* @param recSize The number of records written.
*
* @return The bufer of bytes modified.
*/
public int writeSSTHeader( UnicodeString.UnicodeRecordStats stats, byte[] data, int bufferIndex, int recSize )
{
int offset = bufferIndex;
LittleEndian.putShort( data, offset, SSTRecord.sid );
offset += LittleEndianConsts.SHORT_SIZE;
stats.recordSize += LittleEndianConsts.SHORT_SIZE;
stats.remainingSize -= LittleEndianConsts.SHORT_SIZE;
//Delay writing the length
stats.lastLengthPos = offset;
offset += LittleEndianConsts.SHORT_SIZE;
stats.recordSize += LittleEndianConsts.SHORT_SIZE;
stats.remainingSize -= LittleEndianConsts.SHORT_SIZE;
LittleEndian.putInt( data, offset, numStrings );
offset += LittleEndianConsts.INT_SIZE;
stats.recordSize += LittleEndianConsts.INT_SIZE;
stats.remainingSize -= LittleEndianConsts.INT_SIZE;
LittleEndian.putInt( data, offset, numUniqueStrings );
offset += LittleEndianConsts.INT_SIZE;
stats.recordSize += LittleEndianConsts.INT_SIZE;
stats.remainingSize -= LittleEndianConsts.INT_SIZE;
return offset - bufferIndex;
}
}

File: SSTRecordSizeCalculator.java (deleted)

@ -1,51 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record;
import org.apache.poi.util.IntMapper;
/**
* Used to calculate the record sizes for a particular record. This kind of
* sucks because it's similar to the SST serialization code. In general
* the SST serialization code needs to be rewritten.
*
* @author Glen Stampoultzis (glens at apache.org)
* @author Jason Height (jheight at apache.org)
*/
class SSTRecordSizeCalculator
{
private IntMapper strings;
public SSTRecordSizeCalculator(IntMapper strings)
{
this.strings = strings;
}
public int getRecordSize() {
UnicodeString.UnicodeRecordStats rs = new UnicodeString.UnicodeRecordStats();
rs.remainingSize -= SSTRecord.SST_RECORD_OVERHEAD;
rs.recordSize += SSTRecord.SST_RECORD_OVERHEAD;
for (int i=0; i < strings.size(); i++ )
{
UnicodeString unistr = ( (UnicodeString) strings.get(i));
unistr.getRecordSize(rs);
}
return rs.recordSize;
}
}

File: SSTSerializer.java

@ -1,4 +1,3 @@
/* ==================================================================== /* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with contributor license agreements. See the NOTICE file distributed with
@ -15,12 +14,11 @@
See the License for the specific language governing permissions and See the License for the specific language governing permissions and
limitations under the License. limitations under the License.
==================================================================== */ ==================================================================== */
package org.apache.poi.hssf.record; package org.apache.poi.hssf.record;
import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.util.IntMapper; import org.apache.poi.util.IntMapper;
import org.apache.poi.util.LittleEndian;
/** /**
* This class handles serialization of SST records. It utilizes the record processor * This class handles serialization of SST records. It utilizes the record processor
@ -28,71 +26,50 @@ import org.apache.poi.util.LittleEndian;
* *
* @author Glen Stampoultzis (glens at apache.org) * @author Glen Stampoultzis (glens at apache.org)
*/ */
class SSTSerializer final class SSTSerializer {
{
// todo: make private again private final int _numStrings;
private IntMapper strings; private final int _numUniqueStrings;
private SSTRecordHeader sstRecordHeader; private final IntMapper strings;
/** Offsets from the beginning of the SST record (even across continuations) */ /** Offsets from the beginning of the SST record (even across continuations) */
int[] bucketAbsoluteOffsets; private final int[] bucketAbsoluteOffsets;
/** Offsets relative the start of the current SST or continue record */ /** Offsets relative the start of the current SST or continue record */
int[] bucketRelativeOffsets; private final int[] bucketRelativeOffsets;
int startOfSST, startOfRecord; int startOfSST, startOfRecord;
public SSTSerializer( IntMapper strings, int numStrings, int numUniqueStrings ) public SSTSerializer( IntMapper strings, int numStrings, int numUniqueStrings )
{ {
this.strings = strings; this.strings = strings;
this.sstRecordHeader = new SSTRecordHeader( numStrings, numUniqueStrings ); _numStrings = numStrings;
_numUniqueStrings = numUniqueStrings;
int infoRecs = ExtSSTRecord.getNumberOfInfoRecsForStrings(strings.size()); int infoRecs = ExtSSTRecord.getNumberOfInfoRecsForStrings(strings.size());
this.bucketAbsoluteOffsets = new int[infoRecs]; this.bucketAbsoluteOffsets = new int[infoRecs];
this.bucketRelativeOffsets = new int[infoRecs]; this.bucketRelativeOffsets = new int[infoRecs];
} }
/** public void serialize(ContinuableRecordOutput out) {
* Create a byte array consisting of an SST record and any out.writeInt(_numStrings);
* required Continue records, ready to be written out. out.writeInt(_numUniqueStrings);
* <p>
* If an SST record and any subsequent Continue records are read
* in to create this instance, this method should produce a byte
* array that is identical to the byte array produced by
* concatenating the input records' data.
*
* @return the byte array
*/
public int serialize(int offset, byte[] data )
{
UnicodeString.UnicodeRecordStats stats = new UnicodeString.UnicodeRecordStats();
sstRecordHeader.writeSSTHeader( stats, data, 0 + offset, 0 );
int pos = offset + SSTRecord.SST_RECORD_OVERHEAD;
for ( int k = 0; k < strings.size(); k++ ) for ( int k = 0; k < strings.size(); k++ )
{ {
if (k % ExtSSTRecord.DEFAULT_BUCKET_SIZE == 0) if (k % ExtSSTRecord.DEFAULT_BUCKET_SIZE == 0)
{ {
int rOff = out.getTotalSize();
int index = k/ExtSSTRecord.DEFAULT_BUCKET_SIZE; int index = k/ExtSSTRecord.DEFAULT_BUCKET_SIZE;
if (index < ExtSSTRecord.MAX_BUCKETS) { if (index < ExtSSTRecord.MAX_BUCKETS) {
//Excel only indexes the first 128 buckets. //Excel only indexes the first 128 buckets.
bucketAbsoluteOffsets[index] = pos-offset; bucketAbsoluteOffsets[index] = rOff;
bucketRelativeOffsets[index] = pos-offset; bucketRelativeOffsets[index] = rOff;
} }
} }
UnicodeString s = getUnicodeString(k); UnicodeString s = getUnicodeString(k);
pos += s.serialize(stats, pos, data); s.serialize(out);
} }
//Check to see if there is a hanging continue record length }
if (stats.lastLengthPos != -1) {
short lastRecordLength = (short)(pos - stats.lastLengthPos-2);
if (lastRecordLength > 8224)
throw new InternalError();
LittleEndian.putShort(data, stats.lastLengthPos, lastRecordLength);
}
return pos - offset;
}
private UnicodeString getUnicodeString( int index ) private UnicodeString getUnicodeString( int index )

File: StringRecord.java

@ -17,19 +17,23 @@
package org.apache.poi.hssf.record; package org.apache.poi.hssf.record;
import org.apache.poi.util.LittleEndian; import org.apache.poi.hssf.record.cont.ContinuableRecord;
import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.util.StringUtil; import org.apache.poi.util.StringUtil;
/** /**
* Supports the STRING record structure. (0x0207) * STRING (0x0207)<p/>
*
* Stores the cached result of a text formula
* *
* @author Glen Stampoultzis (glens at apache.org) * @author Glen Stampoultzis (glens at apache.org)
*/ */
public class StringRecord extends Record { public final class StringRecord extends ContinuableRecord {
public final static short sid = 0x0207;
private int field_1_string_length; public final static short sid = 0x0207;
private byte field_2_unicode_flag;
private String field_3_string; private boolean _is16bitUnicode;
private String _text;
public StringRecord() public StringRecord()
@ -39,77 +43,24 @@ public class StringRecord extends Record {
/** /**
* @param in the RecordInputstream to read the record from * @param in the RecordInputstream to read the record from
*/ */
public StringRecord( RecordInputStream in) public StringRecord( RecordInputStream in) {
{ int field_1_string_length = in.readUShort();
field_1_string_length = in.readShort(); _is16bitUnicode = in.readByte() != 0x00;
field_2_unicode_flag = in.readByte();
byte[] data = in.readRemainder(); if (_is16bitUnicode){
//Why isn't this using the in.readString methods??? _text = in.readUnicodeLEString(field_1_string_length);
if (isUnCompressedUnicode()) } else {
{ _text = in.readCompressedUnicode(field_1_string_length);
field_3_string = StringUtil.getFromUnicodeLE(data, 0, field_1_string_length );
}
else
{
field_3_string = StringUtil.getFromCompressedUnicode(data, 0, field_1_string_length);
} }
} }
public void processContinueRecord(byte[] data) {
if(isUnCompressedUnicode()) { protected void serialize(ContinuableRecordOutput out) {
field_3_string += StringUtil.getFromUnicodeLE(data, 0, field_1_string_length - field_3_string.length()); out.writeShort(_text.length());
} else { out.writeStringData(_text);
field_3_string += StringUtil.getFromCompressedUnicode(data, 0, field_1_string_length - field_3_string.length());
}
} }
private int getStringByteLength()
{
return isUnCompressedUnicode() ? field_1_string_length * 2 : field_1_string_length;
}
protected int getDataSize() {
return 2 + 1 + getStringByteLength();
}
/**
* is this uncompressed unicode (16bit)? Or just 8-bit compressed?
* @return isUnicode - True for 16bit- false for 8bit
*/
public boolean isUnCompressedUnicode()
{
return (field_2_unicode_flag == 1);
}
/**
* called by the class that is responsible for writing this sucker.
* Subclasses should implement this so that their data is passed back in a
* byte array.
*
* @param offset to begin writing at
* @param data byte array containing instance data
* @return number of bytes written
*/
public int serialize( int offset, byte[] data )
{
LittleEndian.putUShort(data, 0 + offset, sid);
LittleEndian.putUShort(data, 2 + offset, 3 + getStringByteLength());
LittleEndian.putUShort(data, 4 + offset, field_1_string_length);
data[6 + offset] = field_2_unicode_flag;
if (isUnCompressedUnicode())
{
StringUtil.putUnicodeLE(field_3_string, data, 7 + offset);
}
else
{
StringUtil.putCompressedUnicode(field_3_string, data, 7 + offset);
}
return getRecordSize();
}
/**
* return the non static version of the id for this record.
*/
public short getSid() public short getSid()
{ {
return sid; return sid;
@ -120,26 +71,16 @@ public class StringRecord extends Record {
*/ */
public String getString() public String getString()
{ {
return field_3_string; return _text;
} }
/**
* Sets whether the string is compressed or not
* @param unicode_flag 1 = uncompressed, 0 = compressed
*/
public void setCompressedFlag( byte unicode_flag )
{
this.field_2_unicode_flag = unicode_flag;
}
/** /**
* Sets the string represented by this record. * Sets the string represented by this record.
*/ */
public void setString( String string ) public void setString(String string) {
{ _text = string;
this.field_1_string_length = string.length(); _is16bitUnicode = StringUtil.hasMultibyte(string);
this.field_3_string = string;
setCompressedFlag(StringUtil.hasMultibyte(string) ? (byte)1 : (byte)0);
} }
public String toString() public String toString()
@ -148,16 +89,15 @@ public class StringRecord extends Record {
buffer.append("[STRING]\n"); buffer.append("[STRING]\n");
buffer.append(" .string = ") buffer.append(" .string = ")
.append(field_3_string).append("\n"); .append(_text).append("\n");
buffer.append("[/STRING]\n"); buffer.append("[/STRING]\n");
return buffer.toString(); return buffer.toString();
} }
public Object clone() { public Object clone() {
StringRecord rec = new StringRecord(); StringRecord rec = new StringRecord();
rec.field_1_string_length = this.field_1_string_length; rec._is16bitUnicode= _is16bitUnicode;
rec.field_2_unicode_flag= this.field_2_unicode_flag; rec._text = _text;
rec.field_3_string = this.field_3_string;
return rec; return rec;
} }
} }
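Note (illustration only): the pattern is the same for every record moved onto the new base class. A minimal sketch of a ContinuableRecord subclass; ExampleNoteRecord and its sid are invented, only ContinuableRecord and ContinuableRecordOutput are real POI types here.

```java
import org.apache.poi.hssf.record.cont.ContinuableRecord;
import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;

/**
 * Hypothetical record type, used only to sketch the ContinuableRecord pattern
 * introduced in r711749. The base class owns header writing, size calculation
 * and ContinueRecord boundaries; the subclass just emits its logical payload.
 */
public final class ExampleNoteRecord extends ContinuableRecord {
    public static final short sid = 0x0FFF; // made-up sid, not a real BIFF type

    private String _text = "";

    public short getSid() {
        return sid;
    }

    protected void serialize(ContinuableRecordOutput out) {
        out.writeShort(_text.length());
        // writeStringData() emits the unicode flag byte and then the characters,
        // choosing 8-bit or 16-bit encoding based on the text, as StringRecord does.
        out.writeStringData(_text);
    }
}
```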

View File

@ -17,11 +17,12 @@
package org.apache.poi.hssf.record; package org.apache.poi.hssf.record;
import org.apache.poi.hssf.record.UnicodeString.UnicodeRecordStats; import org.apache.poi.util.LittleEndianByteArrayOutputStream;
import org.apache.poi.util.LittleEndian; import org.apache.poi.util.LittleEndianOutput;
import org.apache.poi.util.StringUtil;
/** /**
* Title: Sup Book (EXTERNALBOOK) <P> * Title: Sup Book - EXTERNALBOOK (0x01AE) <p/>
* Description: A External Workbook Description (Supplemental Book) * Description: A External Workbook Description (Supplemental Book)
* Its only a dummy record for making new ExternSheet Record <P> * Its only a dummy record for making new ExternSheet Record <P>
* REFERENCE: 5.38<P> * REFERENCE: 5.38<P>
@ -31,25 +32,25 @@ import org.apache.poi.util.LittleEndian;
*/ */
public final class SupBookRecord extends Record { public final class SupBookRecord extends Record {
public final static short sid = 0x1AE; public final static short sid = 0x01AE;
private static final short SMALL_RECORD_SIZE = 4; private static final short SMALL_RECORD_SIZE = 4;
private static final short TAG_INTERNAL_REFERENCES = 0x0401; private static final short TAG_INTERNAL_REFERENCES = 0x0401;
private static final short TAG_ADD_IN_FUNCTIONS = 0x3A01; private static final short TAG_ADD_IN_FUNCTIONS = 0x3A01;
private short field_1_number_of_sheets; private short field_1_number_of_sheets;
private UnicodeString field_2_encoded_url; private String field_2_encoded_url;
private UnicodeString[] field_3_sheet_names; private String[] field_3_sheet_names;
private boolean _isAddInFunctions; private boolean _isAddInFunctions;
public static SupBookRecord createInternalReferences(short numberOfSheets) { public static SupBookRecord createInternalReferences(short numberOfSheets) {
return new SupBookRecord(false, numberOfSheets); return new SupBookRecord(false, numberOfSheets);
} }
public static SupBookRecord createAddInFunctions() { public static SupBookRecord createAddInFunctions() {
return new SupBookRecord(true, (short)0); return new SupBookRecord(true, (short)0);
} }
public static SupBookRecord createExternalReferences(UnicodeString url, UnicodeString[] sheetNames) { public static SupBookRecord createExternalReferences(String url, String[] sheetNames) {
return new SupBookRecord(url, sheetNames); return new SupBookRecord(url, sheetNames);
} }
private SupBookRecord(boolean isAddInFuncs, short numberOfSheets) { private SupBookRecord(boolean isAddInFuncs, short numberOfSheets) {
@ -59,7 +60,7 @@ public final class SupBookRecord extends Record {
field_3_sheet_names = null; field_3_sheet_names = null;
_isAddInFunctions = isAddInFuncs; _isAddInFunctions = isAddInFuncs;
} }
public SupBookRecord(UnicodeString url, UnicodeString[] sheetNames) { public SupBookRecord(String url, String[] sheetNames) {
field_1_number_of_sheets = (short) sheetNames.length; field_1_number_of_sheets = (short) sheetNames.length;
field_2_encoded_url = url; field_2_encoded_url = url;
field_3_sheet_names = sheetNames; field_3_sheet_names = sheetNames;
@ -84,18 +85,18 @@ public final class SupBookRecord extends Record {
* @param offset of the record's data (provided a big array of the file) * @param offset of the record's data (provided a big array of the file)
*/ */
public SupBookRecord(RecordInputStream in) { public SupBookRecord(RecordInputStream in) {
int recLen = in.remaining(); int recLen = in.remaining();
field_1_number_of_sheets = in.readShort(); field_1_number_of_sheets = in.readShort();
if(recLen > SMALL_RECORD_SIZE) { if(recLen > SMALL_RECORD_SIZE) {
// 5.38.1 External References // 5.38.1 External References
_isAddInFunctions = false; _isAddInFunctions = false;
field_2_encoded_url = in.readUnicodeString(); field_2_encoded_url = in.readString();
UnicodeString[] sheetNames = new UnicodeString[field_1_number_of_sheets]; String[] sheetNames = new String[field_1_number_of_sheets];
for (int i = 0; i < sheetNames.length; i++) { for (int i = 0; i < sheetNames.length; i++) {
sheetNames[i] = in.readUnicodeString(); sheetNames[i] = in.readString();
} }
field_3_sheet_names = sheetNames; field_3_sheet_names = sheetNames;
return; return;
@ -103,7 +104,7 @@ public final class SupBookRecord extends Record {
// else not 'External References' // else not 'External References'
field_2_encoded_url = null; field_2_encoded_url = null;
field_3_sheet_names = null; field_3_sheet_names = null;
short nextShort = in.readShort(); short nextShort = in.readShort();
if(nextShort == TAG_INTERNAL_REFERENCES) { if(nextShort == TAG_INTERNAL_REFERENCES) {
// 5.38.2 'Internal References' // 5.38.2 'Internal References'
@ -116,7 +117,7 @@ public final class SupBookRecord extends Record {
+ field_1_number_of_sheets + ")"); + field_1_number_of_sheets + ")");
} }
} else { } else {
throw new RuntimeException("invalid EXTERNALBOOK code (" throw new RuntimeException("invalid EXTERNALBOOK code ("
+ Integer.toHexString(nextShort) + ")"); + Integer.toHexString(nextShort) + ")");
} }
} }
@ -124,7 +125,7 @@ public final class SupBookRecord extends Record {
public String toString() { public String toString() {
StringBuffer sb = new StringBuffer(); StringBuffer sb = new StringBuffer();
sb.append(getClass().getName()).append(" [SUPBOOK "); sb.append(getClass().getName()).append(" [SUPBOOK ");
if(isExternalReferences()) { if(isExternalReferences()) {
sb.append("External References"); sb.append("External References");
sb.append(" nSheets=").append(field_1_number_of_sheets); sb.append(" nSheets=").append(field_1_number_of_sheets);
@ -143,18 +144,14 @@ public final class SupBookRecord extends Record {
return SMALL_RECORD_SIZE; return SMALL_RECORD_SIZE;
} }
int sum = 2; // u16 number of sheets int sum = 2; // u16 number of sheets
UnicodeRecordStats urs = new UnicodeRecordStats();
field_2_encoded_url.getRecordSize(urs); sum += StringUtil.getEncodedSize(field_2_encoded_url);
sum += urs.recordSize;
for(int i=0; i<field_3_sheet_names.length; i++) { for(int i=0; i<field_3_sheet_names.length; i++) {
urs = new UnicodeRecordStats(); sum += StringUtil.getEncodedSize(field_3_sheet_names[i]);
field_3_sheet_names[i].getRecordSize(urs);
sum += urs.recordSize;
} }
return sum; return sum;
} }
/** /**
* called by the class that is responsible for writing this sucker. * called by the class that is responsible for writing this sucker.
* Subclasses should implement this so that their data is passed back in a * Subclasses should implement this so that their data is passed back in a
@ -165,29 +162,26 @@ public final class SupBookRecord extends Record {
* @return number of bytes written * @return number of bytes written
*/ */
public int serialize(int offset, byte [] data) { public int serialize(int offset, byte [] data) {
LittleEndian.putShort(data, 0 + offset, sid);
int dataSize = getDataSize(); int dataSize = getDataSize();
LittleEndian.putShort(data, 2 + offset, (short) dataSize); int recordSize = 4 + dataSize;
LittleEndian.putShort(data, 4 + offset, field_1_number_of_sheets); LittleEndianOutput out = new LittleEndianByteArrayOutputStream(data, offset, recordSize);
out.writeShort(sid);
out.writeShort(dataSize);
out.writeShort(field_1_number_of_sheets);
if(isExternalReferences()) { if(isExternalReferences()) {
StringUtil.writeUnicodeString(out, field_2_encoded_url);
int currentOffset = 6 + offset;
UnicodeRecordStats urs = new UnicodeRecordStats();
field_2_encoded_url.serialize(urs, currentOffset, data);
currentOffset += urs.recordSize;
for(int i=0; i<field_3_sheet_names.length; i++) { for(int i=0; i<field_3_sheet_names.length; i++) {
urs = new UnicodeRecordStats(); StringUtil.writeUnicodeString(out, field_3_sheet_names[i]);
field_3_sheet_names[i].serialize(urs, currentOffset, data);
currentOffset += urs.recordSize;
} }
} else { } else {
short field2val = _isAddInFunctions ? TAG_ADD_IN_FUNCTIONS : TAG_INTERNAL_REFERENCES; int field2val = _isAddInFunctions ? TAG_ADD_IN_FUNCTIONS : TAG_INTERNAL_REFERENCES;
LittleEndian.putShort(data, 6 + offset, field2val); out.writeShort(field2val);
} }
return dataSize + 4; return recordSize;
} }
public void setNumberOfSheets(short number){ public void setNumberOfSheets(short number){
@ -203,7 +197,7 @@ public final class SupBookRecord extends Record {
return sid; return sid;
} }
public String getURL() { public String getURL() {
String encodedUrl = field_2_encoded_url.getString(); String encodedUrl = field_2_encoded_url;
switch(encodedUrl.charAt(0)) { switch(encodedUrl.charAt(0)) {
case 0: // Reference to an empty workbook name case 0: // Reference to an empty workbook name
return encodedUrl.substring(1); // will this just be empty string? return encodedUrl.substring(1); // will this just be empty string?
@ -211,7 +205,7 @@ public final class SupBookRecord extends Record {
return decodeFileName(encodedUrl); return decodeFileName(encodedUrl);
case 2: // Self-referential external reference case 2: // Self-referential external reference
return encodedUrl.substring(1); return encodedUrl.substring(1);
} }
return encodedUrl; return encodedUrl;
} }
@ -219,18 +213,18 @@ public final class SupBookRecord extends Record {
return encodedUrl.substring(1); return encodedUrl.substring(1);
// TODO the following special characters may appear in the rest of the string, and need to get interpreted // TODO the following special characters may appear in the rest of the string, and need to get interpreted
/* see "MICROSOFT OFFICE EXCEL 97-2007 BINARY FILE FORMAT SPECIFICATION" /* see "MICROSOFT OFFICE EXCEL 97-2007 BINARY FILE FORMAT SPECIFICATION"
chVolume 1 chVolume 1
chSameVolume 2 chSameVolume 2
chDownDir 3 chDownDir 3
chUpDir 4 chUpDir 4
chLongVolume 5 chLongVolume 5
chStartupDir 6 chStartupDir 6
chAltStartupDir 7 chAltStartupDir 7
chLibDir 8 chLibDir 8
*/ */
} }
public UnicodeString[] getSheetNames() { public String[] getSheetNames() {
return (UnicodeString[]) field_3_sheet_names.clone(); return (String[]) field_3_sheet_names.clone();
} }
} }
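Note (illustration only): a short usage sketch of the simplified SupBookRecord API with invented values; callers now pass and receive plain java.lang.String instead of UnicodeString.

```java
import org.apache.poi.hssf.record.SupBookRecord;

public class SupBookExample {
    public static void main(String[] args) {
        // a leading '\u0001' marks an encoded file name, which getURL() decodes
        String url = "\u0001other-workbook.xls";
        String[] sheetNames = { "Sheet1", "Data" };

        SupBookRecord ebr = SupBookRecord.createExternalReferences(url, sheetNames);

        System.out.println(ebr.getURL());     // prints "other-workbook.xls"
        String[] names = ebr.getSheetNames(); // returns a copy of the sheet name array
        for (int i = 0; i < names.length; i++) {
            System.out.println(names[i]);
        }
    }
}
```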

File: TextObjectRecord.java

@ -17,16 +17,13 @@
package org.apache.poi.hssf.record; package org.apache.poi.hssf.record;
import java.io.UnsupportedEncodingException; import org.apache.poi.hssf.record.cont.ContinuableRecord;
import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.hssf.record.formula.Ptg; import org.apache.poi.hssf.record.formula.Ptg;
import org.apache.poi.hssf.usermodel.HSSFRichTextString; import org.apache.poi.hssf.usermodel.HSSFRichTextString;
import org.apache.poi.util.BitField; import org.apache.poi.util.BitField;
import org.apache.poi.util.BitFieldFactory; import org.apache.poi.util.BitFieldFactory;
import org.apache.poi.util.HexDump; import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianByteArrayOutputStream;
import org.apache.poi.util.LittleEndianOutput;
/** /**
* The TXO record (0x01B6) is used to define the properties of a text box. It is * The TXO record (0x01B6) is used to define the properties of a text box. It is
@ -36,7 +33,7 @@ import org.apache.poi.util.LittleEndianOutput;
* *
* @author Glen Stampoultzis (glens at apache.org) * @author Glen Stampoultzis (glens at apache.org)
*/ */
public final class TextObjectRecord extends Record { public final class TextObjectRecord extends ContinuableRecord {
public final static short sid = 0x01B6; public final static short sid = 0x01B6;
private static final int FORMAT_RUN_ENCODED_SIZE = 8; // 2 shorts and 4 bytes reserved private static final int FORMAT_RUN_ENCODED_SIZE = 8; // 2 shorts and 4 bytes reserved
@ -163,30 +160,7 @@ public final class TextObjectRecord extends Record {
return sid; return sid;
} }
/** private void serializeTXORecord(ContinuableRecordOutput out) {
* Only for the current record. does not include any subsequent Continue
* records
*/
private int getCurrentRecordDataSize() {
int result = 2 + 2 + 2 + 2 + 2 + 2 + 2 + 4;
if (_linkRefPtg != null) {
result += 2 // formula size
+ 4 // unknownInt
+_linkRefPtg.getSize();
if (_unknownPostFormulaByte != null) {
result += 1;
}
}
return result;
}
private int serializeTXORecord(int offset, byte[] data) {
int dataSize = getCurrentRecordDataSize();
int recSize = dataSize+4;
LittleEndianOutput out = new LittleEndianByteArrayOutputStream(data, offset, recSize);
out.writeShort(TextObjectRecord.sid);
out.writeShort(dataSize);
out.writeShort(field_1_options); out.writeShort(field_1_options);
out.writeShort(field_2_textOrientation); out.writeShort(field_2_textOrientation);
@ -206,79 +180,23 @@ public final class TextObjectRecord extends Record {
out.writeByte(_unknownPostFormulaByte.byteValue()); out.writeByte(_unknownPostFormulaByte.byteValue());
} }
} }
return recSize;
} }
private int serializeTrailingRecords(int offset, byte[] data) { private void serializeTrailingRecords(ContinuableRecordOutput out) {
byte[] textBytes; out.writeContinue();
try { out.writeStringData(_text.getString());
textBytes = _text.getString().getBytes("UTF-16LE"); out.writeContinue();
} catch (UnsupportedEncodingException e) { writeFormatData(out, _text);
throw new RuntimeException(e.getMessage(), e);
}
int remainingLength = textBytes.length;
int countTextBytesWritten = 0;
int pos = offset;
// (regardless what was read, we always serialize double-byte
// unicode characters (UTF-16LE).
Byte unicodeFlag = new Byte((byte)1);
while (remainingLength > 0) {
int chunkSize = Math.min(RecordInputStream.MAX_RECORD_DATA_SIZE - 2, remainingLength);
remainingLength -= chunkSize;
pos += ContinueRecord.write(data, pos, unicodeFlag, textBytes, countTextBytesWritten, chunkSize);
countTextBytesWritten += chunkSize;
}
byte[] formatData = createFormatData(_text);
pos += ContinueRecord.write(data, pos, null, formatData);
return pos - offset;
} }
private int getTrailingRecordsSize() { protected void serialize(ContinuableRecordOutput out) {
if (_text.length() < 1) {
return 0;
}
int encodedTextSize = 0;
int textBytesLength = _text.length() * LittleEndian.SHORT_SIZE;
while (textBytesLength > 0) {
int chunkSize = Math.min(RecordInputStream.MAX_RECORD_DATA_SIZE - 2, textBytesLength);
textBytesLength -= chunkSize;
encodedTextSize += 4; // +4 for ContinueRecord sid+size serializeTXORecord(out);
encodedTextSize += 1+chunkSize; // +1 for compressed unicode flag,
}
int encodedFormatSize = (_text.numFormattingRuns() + 1) * FORMAT_RUN_ENCODED_SIZE
+ 4; // +4 for ContinueRecord sid+size
return encodedTextSize + encodedFormatSize;
}
public int serialize(int offset, byte[] data) {
int expectedTotalSize = getRecordSize();
int totalSize = serializeTXORecord(offset, data);
if (_text.getString().length() > 0) { if (_text.getString().length() > 0) {
totalSize += serializeTrailingRecords(offset+totalSize, data); serializeTrailingRecords(out);
} }
if (totalSize != expectedTotalSize)
throw new RecordFormatException(totalSize
+ " bytes written but getRecordSize() reports " + expectedTotalSize);
return totalSize;
} }
/**
* Note - this total size includes all potential {@link ContinueRecord}s written
* but it is not the "ushort size" value to be written at the start of the first BIFF record
*/
protected int getDataSize() {
return getCurrentRecordDataSize() + getTrailingRecordsSize();
}
private int getFormattingDataLength() { private int getFormattingDataLength() {
if (_text.length() < 1) { if (_text.length() < 1) {
// important - no formatting data if text is empty // important - no formatting data if text is empty
@ -287,25 +205,17 @@ public final class TextObjectRecord extends Record {
return (_text.numFormattingRuns() + 1) * FORMAT_RUN_ENCODED_SIZE; return (_text.numFormattingRuns() + 1) * FORMAT_RUN_ENCODED_SIZE;
} }
private static byte[] createFormatData(HSSFRichTextString str) { private static void writeFormatData(ContinuableRecordOutput out , HSSFRichTextString str) {
int nRuns = str.numFormattingRuns(); int nRuns = str.numFormattingRuns();
byte[] result = new byte[(nRuns + 1) * FORMAT_RUN_ENCODED_SIZE];
int pos = 0;
for (int i = 0; i < nRuns; i++) { for (int i = 0; i < nRuns; i++) {
LittleEndian.putUShort(result, pos, str.getIndexOfFormattingRun(i)); out.writeShort(str.getIndexOfFormattingRun(i));
pos += 2;
int fontIndex = str.getFontOfFormattingRun(i); int fontIndex = str.getFontOfFormattingRun(i);
LittleEndian.putUShort(result, pos, fontIndex == str.NO_FONT ? 0 : fontIndex); out.writeShort(fontIndex == str.NO_FONT ? 0 : fontIndex);
pos += 2; out.writeInt(0); // skip reserved
pos += 4; // skip reserved
} }
LittleEndian.putUShort(result, pos, str.length()); out.writeShort(str.length());
pos += 2; out.writeShort(0);
LittleEndian.putUShort(result, pos, 0); out.writeInt(0); // skip reserved
pos += 2;
pos += 4; // skip reserved
return result;
} }
/** /**

File: UnicodeString.java

@ -17,75 +17,84 @@
package org.apache.poi.hssf.record; package org.apache.poi.hssf.record;
import org.apache.poi.util.BitField;
import org.apache.poi.util.BitFieldFactory;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.HexDump;
import java.util.Iterator;
import java.util.List;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.util.BitField;
import org.apache.poi.util.BitFieldFactory;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndianInput;
import org.apache.poi.util.LittleEndianOutput;
/** /**
* Title: Unicode String<P> * Title: Unicode String<p/>
* Description: Unicode String record. We implement these as a record, although * Description: Unicode String - just standard fields that are in several records.
* they are really just standard fields that are in several records. * It is considered more desirable then repeating it in all of them.<p/>
* It is considered more desirable then repeating it in all of them.<P> * REFERENCE: PG 264 Microsoft Excel 97 Developer's Kit (ISBN: 1-57231-498-2)<p/>
* REFERENCE: PG 264 Microsoft Excel 97 Developer's Kit (ISBN: 1-57231-498-2)<P>
* @author Andrew C. Oliver * @author Andrew C. Oliver
* @author Marc Johnson (mjohnson at apache dot org) * @author Marc Johnson (mjohnson at apache dot org)
* @author Glen Stampoultzis (glens at apache.org) * @author Glen Stampoultzis (glens at apache.org)
*/ */
public final class UnicodeString implements Comparable { public final class UnicodeString implements Comparable {
private short field_1_charCount; // = 0; private short field_1_charCount;
private byte field_2_optionflags; // = 0; private byte field_2_optionflags;
private String field_3_string; // = null; private String field_3_string;
private List field_4_format_runs; private List field_4_format_runs;
private byte[] field_5_ext_rst; private byte[] field_5_ext_rst;
private static final BitField highByte = BitFieldFactory.getInstance(0x1); private static final BitField highByte = BitFieldFactory.getInstance(0x1);
private static final BitField extBit = BitFieldFactory.getInstance(0x4); private static final BitField extBit = BitFieldFactory.getInstance(0x4);
private static final BitField richText = BitFieldFactory.getInstance(0x8); private static final BitField richText = BitFieldFactory.getInstance(0x8);
public static class FormatRun implements Comparable { public static class FormatRun implements Comparable {
short character; short character;
short fontIndex; short fontIndex;
public FormatRun(short character, short fontIndex) { public FormatRun(short character, short fontIndex) {
this.character = character; this.character = character;
this.fontIndex = fontIndex; this.fontIndex = fontIndex;
}
public short getCharacterPos() {
return character;
}
public short getFontIndex() {
return fontIndex;
}
public boolean equals(Object o) {
if ((o == null) || (o.getClass() != this.getClass()))
{
return false;
} }
FormatRun other = ( FormatRun ) o;
return ((character == other.character) && (fontIndex == other.fontIndex)); public FormatRun(LittleEndianInput in) {
} this(in.readShort(), in.readShort());
}
public int compareTo(Object obj) { public short getCharacterPos() {
FormatRun r = (FormatRun)obj; return character;
if ((character == r.character) && (fontIndex == r.fontIndex)) }
return 0;
if (character == r.character)
return fontIndex - r.fontIndex;
else return character - r.character;
}
public String toString() { public short getFontIndex() {
return "character="+character+",fontIndex="+fontIndex; return fontIndex;
} }
public boolean equals(Object o) {
if (!(o instanceof FormatRun)) {
return false;
}
FormatRun other = ( FormatRun ) o;
return character == other.character && fontIndex == other.fontIndex;
}
public int compareTo(Object obj) {
FormatRun r = (FormatRun)obj;
if ((character == r.character) && (fontIndex == r.fontIndex))
return 0;
if (character == r.character)
return fontIndex - r.fontIndex;
else return character - r.character;
}
public String toString() {
return "character="+character+",fontIndex="+fontIndex;
}
public void serialize(LittleEndianOutput out) {
out.writeShort(character);
out.writeShort(fontIndex);
}
} }
private UnicodeString() { private UnicodeString() {
@ -116,13 +125,12 @@ public final class UnicodeString implements Comparable {
*/ */
public boolean equals(Object o) public boolean equals(Object o)
{ {
if ((o == null) || (o.getClass() != this.getClass())) if (!(o instanceof UnicodeString)) {
{
return false; return false;
} }
UnicodeString other = ( UnicodeString ) o; UnicodeString other = (UnicodeString) o;
//Ok lets do this in stages to return a quickly, first check the actual string //OK lets do this in stages to return a quickly, first check the actual string
boolean eq = ((field_1_charCount == other.field_1_charCount) boolean eq = ((field_1_charCount == other.field_1_charCount)
&& (field_2_optionflags == other.field_2_optionflags) && (field_2_optionflags == other.field_2_optionflags)
&& field_3_string.equals(other.field_3_string)); && field_3_string.equals(other.field_3_string));
@ -148,7 +156,7 @@ public final class UnicodeString implements Comparable {
if (!run1.equals(run2)) if (!run1.equals(run2))
return false; return false;
} }
//Well the format runs are equal as well!, better check the ExtRst data //Well the format runs are equal as well!, better check the ExtRst data
//Which by the way we dont know how to decode! //Which by the way we dont know how to decode!
@ -194,19 +202,17 @@ public final class UnicodeString implements Comparable {
boolean isCompressed = ((field_2_optionflags & 1) == 0); boolean isCompressed = ((field_2_optionflags & 1) == 0);
if (isCompressed) { if (isCompressed) {
field_3_string = in.readCompressedUnicode(field_1_charCount); field_3_string = in.readCompressedUnicode(field_1_charCount);
} else { } else {
field_3_string = in.readUnicodeLEString(field_1_charCount); field_3_string = in.readUnicodeLEString(field_1_charCount);
} }
if (isRichText() && (runCount > 0)) { if (isRichText() && (runCount > 0)) {
field_4_format_runs = new ArrayList(runCount); field_4_format_runs = new ArrayList(runCount);
for (int i=0;i<runCount;i++) { for (int i=0;i<runCount;i++) {
field_4_format_runs.add(new FormatRun(in.readShort(), in.readShort())); field_4_format_runs.add(new FormatRun(in));
//read reserved }
//in.readInt();
}
} }
if (isExtendedText() && (extensionLength > 0)) { if (isExtendedText() && (extensionLength > 0)) {
@ -372,11 +378,8 @@ public final class UnicodeString implements Comparable {
field_2_optionflags = richText.clearByte(field_2_optionflags); field_2_optionflags = richText.clearByte(field_2_optionflags);
} }
public byte[] getExtendedRst() {
return this.field_5_ext_rst;
}
public void setExtendedRst(byte[] ext_rst) { void setExtendedRst(byte[] ext_rst) {
if (ext_rst != null) if (ext_rst != null)
field_2_optionflags = extBit.setByte(field_2_optionflags); field_2_optionflags = extBit.setByte(field_2_optionflags);
else field_2_optionflags = extBit.clearByte(field_2_optionflags); else field_2_optionflags = extBit.clearByte(field_2_optionflags);
@ -391,13 +394,13 @@ public final class UnicodeString implements Comparable {
* removed / re-ordered * removed / re-ordered
*/ */
public void swapFontUse(short oldFontIndex, short newFontIndex) { public void swapFontUse(short oldFontIndex, short newFontIndex) {
Iterator i = field_4_format_runs.iterator(); Iterator i = field_4_format_runs.iterator();
while(i.hasNext()) { while(i.hasNext()) {
FormatRun run = (FormatRun)i.next(); FormatRun run = (FormatRun)i.next();
if(run.fontIndex == oldFontIndex) { if(run.fontIndex == oldFontIndex) {
run.fontIndex = newFontIndex; run.fontIndex = newFontIndex;
} }
} }
} }
/** /**
@ -442,353 +445,45 @@ public final class UnicodeString implements Comparable {
return buffer.toString(); return buffer.toString();
} }
private int writeContinueIfRequired(UnicodeRecordStats stats, final int requiredSize, int offset, byte[] data) { public void serialize(ContinuableRecordOutput out) {
//Basic string overhead int numberOfRichTextRuns = 0;
if (stats.remainingSize < requiredSize) { int extendedDataSize = 0;
//Check if be are already in a continue record, if so make sure that if (isRichText() && field_4_format_runs != null) {
//we go back and write out our length numberOfRichTextRuns = field_4_format_runs.size();
if (stats.lastLengthPos != -1) { }
short lastRecordLength = (short)(offset - stats.lastLengthPos - 2); if (isExtendedText() && field_5_ext_rst != null) {
if (lastRecordLength > 8224) extendedDataSize = field_5_ext_rst.length;
throw new InternalError(); }
LittleEndian.putShort(data, stats.lastLengthPos, lastRecordLength);
out.writeString(field_3_string, numberOfRichTextRuns, extendedDataSize);
if (numberOfRichTextRuns > 0) {
//This will ensure that a run does not split a continue
for (int i=0;i<numberOfRichTextRuns;i++) {
if (out.getAvailableSpace() < 4) {
out.writeContinue();
}
FormatRun r = (FormatRun)field_4_format_runs.get(i);
r.serialize(out);
}
} }
LittleEndian.putShort(data, offset, ContinueRecord.sid); if (extendedDataSize > 0) {
offset+=2; // OK ExtRst is actually not documented, so i am going to hope
//Record the location of the last continue length position, but don't write // that we can actually continue on byte boundaries
//anything there yet (since we don't know what it will be!)
stats.lastLengthPos = offset;
offset += 2;
stats.recordSize += 4; int extPos = 0;
stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4; while (true) {
} int nBytesToWrite = Math.min(extendedDataSize - extPos, out.getAvailableSpace());
return offset; out.write(field_5_ext_rst, extPos, nBytesToWrite);
} extPos += nBytesToWrite;
if (extPos >= extendedDataSize) {
public int serialize(UnicodeRecordStats stats, final int offset, byte [] data) break;
{ }
int pos = offset; out.writeContinue();
//Basic string overhead
pos = writeContinueIfRequired(stats, 3, pos, data);
LittleEndian.putShort(data, pos, getCharCount());
pos += 2;
data[ pos ] = getOptionFlags();
pos += 1;
stats.recordSize += 3;
stats.remainingSize-= 3;
if (isRichText()) {
if (field_4_format_runs != null) {
pos = writeContinueIfRequired(stats, 2, pos, data);
LittleEndian.putShort(data, pos, (short) field_4_format_runs.size());
pos += 2;
stats.recordSize += 2;
stats.remainingSize -= 2;
}
}
if ( isExtendedText() )
{
if (this.field_5_ext_rst != null) {
pos = writeContinueIfRequired(stats, 4, pos, data);
LittleEndian.putInt(data, pos, field_5_ext_rst.length);
pos += 4;
stats.recordSize += 4;
stats.remainingSize -= 4;
}
}
int charsize = isUncompressedUnicode() ? 2 : 1;
int strSize = (getString().length() * charsize);
byte[] strBytes = null;
try {
String unicodeString = getString();
if (!isUncompressedUnicode())
{
strBytes = unicodeString.getBytes("ISO-8859-1");
}
else
{
strBytes = unicodeString.getBytes("UTF-16LE");
} }
} }
catch (Exception e) {
throw new InternalError();
}
if (strSize != strBytes.length)
throw new InternalError("That shouldnt have happened!");
//Check to see if the offset occurs mid string, if so then we need to add
//the byte to start with that represents the first byte of the continue record.
if (strSize > stats.remainingSize) {
//OK the offset occurs half way through the string, that means that
//we need an extra byte after the continue record ie we didnt finish
//writing out the string the 1st time through
//But hang on, how many continue records did we span? What if this is
//a REALLY long string. We need to work this all out.
int amountThatCantFit = strSize;
int strPos = 0;
while (amountThatCantFit > 0) {
int amountWritten = Math.min(stats.remainingSize, amountThatCantFit);
//Make sure that the amount that can't fit takes into account
//whether we are writing double byte unicode
if (isUncompressedUnicode()) {
//We have the '-1' here because whether this is the first record or
//subsequent continue records, there is always the case that the
//number of bytes in a string on double byte boundaries is actually odd.
if ( ( (amountWritten ) % 2) == 1)
amountWritten--;
}
System.arraycopy(strBytes, strPos, data, pos, amountWritten);
pos += amountWritten;
strPos += amountWritten;
stats.recordSize += amountWritten;
stats.remainingSize -= amountWritten;
//Ok lets subtract what we can write
amountThatCantFit -= amountWritten;
//Each iteration of this while loop is another continue record, unless
//everything now fits.
if (amountThatCantFit > 0) {
//We know that a continue WILL be requied, but use this common method
pos = writeContinueIfRequired(stats, amountThatCantFit, pos, data);
//The first byte after a continue mid string is the extra byte to
//indicate if this run is compressed or not.
data[pos] = (byte) (isUncompressedUnicode() ? 0x1 : 0x0);
pos++;
stats.recordSize++;
stats.remainingSize --;
}
}
} else {
if (strSize > (data.length-pos))
System.out.println("Hmm shouldnt happen");
//Ok the string fits nicely in the remaining size
System.arraycopy(strBytes, 0, data, pos, strSize);
pos += strSize;
stats.recordSize += strSize;
stats.remainingSize -= strSize;
}
if (isRichText() && (field_4_format_runs != null)) {
int count = field_4_format_runs.size();
//This will ensure that a run does not split a continue
for (int i=0;i<count;i++) {
pos = writeContinueIfRequired(stats, 4, pos, data);
FormatRun r = (FormatRun)field_4_format_runs.get(i);
LittleEndian.putShort(data, pos, r.character);
pos += 2;
LittleEndian.putShort(data, pos, r.fontIndex);
pos += 2;
//Each run count is four bytes
stats.recordSize += 4;
stats.remainingSize -=4;
}
}
if (isExtendedText() && (field_5_ext_rst != null)) {
//Ok ExtRst is actually not documented, so i am going to hope
//that we can actually continue on byte boundaries
int ammountThatCantFit = field_5_ext_rst.length - stats.remainingSize;
int extPos = 0;
if (ammountThatCantFit > 0) {
while (ammountThatCantFit > 0) {
//So for this record we have already written
int ammountWritten = Math.min(stats.remainingSize, ammountThatCantFit);
System.arraycopy(field_5_ext_rst, extPos, data, pos, ammountWritten);
pos += ammountWritten;
extPos += ammountWritten;
stats.recordSize += ammountWritten;
stats.remainingSize -= ammountWritten;
//Ok lets subtract what we can write
ammountThatCantFit -= ammountWritten;
if (ammountThatCantFit > 0) {
pos = writeContinueIfRequired(stats, 1, pos, data);
}
}
} else {
//We can fit wholey in what remains.
System.arraycopy(field_5_ext_rst, 0, data, pos, field_5_ext_rst.length);
pos += field_5_ext_rst.length;
stats.remainingSize -= field_5_ext_rst.length;
stats.recordSize += field_5_ext_rst.length;
}
}
return pos - offset;
}
public void setCompressedUnicode() {
field_2_optionflags = highByte.setByte(field_2_optionflags);
}
public void setUncompressedUnicode() {
field_2_optionflags = highByte.clearByte(field_2_optionflags);
}
private boolean isUncompressedUnicode()
{
return highByte.isSet(getOptionFlags());
}
/** Returns the size of this record, given the amount of record space
* remaining, it will also include the size of writing a continue record.
*/
public static class UnicodeRecordStats {
public int recordSize;
public int remainingSize = SSTRecord.MAX_RECORD_SIZE;
public int lastLengthPos = -1;
}
public void getRecordSize(UnicodeRecordStats stats) {
//Basic string overhead
if (stats.remainingSize < 3) {
//Needs a continue
stats.recordSize += 4;
stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
}
stats.recordSize += 3;
stats.remainingSize-= 3;
//Read the number of rich runs if rich text.
if ( isRichText() )
{
//Run count
if (stats.remainingSize < 2) {
//Needs a continue
//Reset the available space.
stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
//continue record overhead
stats.recordSize+=4;
}
stats.recordSize += 2;
stats.remainingSize -=2;
}
//Read the size of extended data if present.
if ( isExtendedText() )
{
//Needs a continue
//extension length
if (stats.remainingSize < 4) {
//Reset the available space.
stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
//continue record overhead
stats.recordSize+=4;
}
stats.recordSize += 4;
stats.remainingSize -=4;
}
int charsize = isUncompressedUnicode() ? 2 : 1;
int strSize = (getString().length() * charsize);
//Check to see if the offset occurs mid string, if so then we need to add
//the byte to start with that represents the first byte of the continue record.
if (strSize > stats.remainingSize) {
//Ok the offset occurs half way through the string, that means that
//we need an extra byte after the continue record ie we didnt finish
//writing out the string the 1st time through
//But hang on, how many continue records did we span? What if this is
//a REALLY long string. We need to work this all out.
int ammountThatCantFit = strSize;
while (ammountThatCantFit > 0) {
int ammountWritten = Math.min(stats.remainingSize, ammountThatCantFit);
//Make sure that the ammount that cant fit takes into account
//whether we are writing double byte unicode
if (isUncompressedUnicode()) {
//We have the '-1' here because whether this is the first record or
//subsequent continue records, there is always the case that the
//number of bytes in a string on doube byte boundaries is actually odd.
if ( ( (ammountWritten) % 2) == 1)
ammountWritten--;
}
stats.recordSize += ammountWritten;
stats.remainingSize -= ammountWritten;
//Ok lets subtract what we can write
ammountThatCantFit -= ammountWritten;
//Each iteration of this while loop is another continue record, unless
//everything now fits.
if (ammountThatCantFit > 0) {
//Reset the available space.
stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
//continue record overhead
stats.recordSize+=4;
//The first byte after a continue mid string is the extra byte to
//indicate if this run is compressed or not.
stats.recordSize++;
stats.remainingSize --;
}
}
} else {
//Ok the string fits nicely in the remaining size
stats.recordSize += strSize;
stats.remainingSize -= strSize;
}
if (isRichText() && (field_4_format_runs != null)) {
int count = field_4_format_runs.size();
//This will ensure that a run does not split a continue
for (int i=0;i<count;i++) {
if (stats.remainingSize < 4) {
//Reset the available space.
stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
//continue record overhead
stats.recordSize+=4;
}
//Each run count is four bytes
stats.recordSize += 4;
stats.remainingSize -=4;
}
}
if (isExtendedText() && (field_5_ext_rst != null)) {
//Ok ExtRst is actually not documented, so i am going to hope
//that we can actually continue on byte boundaries
int ammountThatCantFit = field_5_ext_rst.length - stats.remainingSize;
if (ammountThatCantFit > 0) {
while (ammountThatCantFit > 0) {
//So for this record we have already written
int ammountWritten = Math.min(stats.remainingSize, ammountThatCantFit);
stats.recordSize += ammountWritten;
stats.remainingSize -= ammountWritten;
//Ok lets subtract what we can write
ammountThatCantFit -= ammountWritten;
if (ammountThatCantFit > 0) {
//Each iteration of this while loop is another continue record.
//Reset the available space.
stats.remainingSize = SSTRecord.MAX_RECORD_SIZE-4;
//continue record overhead
stats.recordSize += 4;
}
}
} else {
//We can fit wholey in what remains.
stats.remainingSize -= field_5_ext_rst.length;
stats.recordSize += field_5_ext_rst.length;
}
}
} }
public int compareTo(Object obj) public int compareTo(Object obj)
@ -801,9 +496,9 @@ public final class UnicodeString implements Comparable {
if (result != 0) if (result != 0)
return result; return result;
//Ok string appears to be equal but now lets compare formatting runs //OK string appears to be equal but now lets compare formatting runs
if ((field_4_format_runs == null) && (str.field_4_format_runs == null)) if ((field_4_format_runs == null) && (str.field_4_format_runs == null))
//Strings are equal, and there are no formtting runs. //Strings are equal, and there are no formatting runs.
return 0; return 0;
if ((field_4_format_runs == null) && (str.field_4_format_runs != null)) if ((field_4_format_runs == null) && (str.field_4_format_runs != null))
@ -850,12 +545,12 @@ public final class UnicodeString implements Comparable {
return 0; return 0;
} }
public boolean isRichText() private boolean isRichText()
{ {
return richText.isSet(getOptionFlags()); return richText.isSet(getOptionFlags());
} }
public boolean isExtendedText() private boolean isExtendedText()
{ {
return extBit.isSet(getOptionFlags()); return extBit.isSet(getOptionFlags());
} }
@ -877,10 +572,8 @@ public final class UnicodeString implements Comparable {
str.field_5_ext_rst = new byte[field_5_ext_rst.length]; str.field_5_ext_rst = new byte[field_5_ext_rst.length];
System.arraycopy(field_5_ext_rst, 0, str.field_5_ext_rst, 0, System.arraycopy(field_5_ext_rst, 0, str.field_5_ext_rst, 0,
field_5_ext_rst.length); field_5_ext_rst.length);
} }
return str; return str;
} }
} }


@ -17,8 +17,6 @@
package org.apache.poi.hssf.record.constant; package org.apache.poi.hssf.record.constant;
import org.apache.poi.hssf.record.UnicodeString;
import org.apache.poi.hssf.record.UnicodeString.UnicodeRecordStats;
import org.apache.poi.util.LittleEndianInput; import org.apache.poi.util.LittleEndianInput;
import org.apache.poi.util.LittleEndianOutput; import org.apache.poi.util.LittleEndianOutput;
import org.apache.poi.util.StringUtil; import org.apache.poi.util.StringUtil;
@ -65,7 +63,7 @@ public final class ConstantValueParser {
case TYPE_NUMBER: case TYPE_NUMBER:
return new Double(in.readDouble()); return new Double(in.readDouble());
case TYPE_STRING: case TYPE_STRING:
return new UnicodeString(StringUtil.readUnicodeString(in)); return StringUtil.readUnicodeString(in);
case TYPE_BOOLEAN: case TYPE_BOOLEAN:
return readBoolean(in); return readBoolean(in);
case TYPE_ERROR_CODE: case TYPE_ERROR_CODE:
@ -111,10 +109,8 @@ public final class ConstantValueParser {
if(cls == Boolean.class || cls == Double.class || cls == ErrorConstant.class) { if(cls == Boolean.class || cls == Double.class || cls == ErrorConstant.class) {
return 8; return 8;
} }
UnicodeString strVal = (UnicodeString)object; String strVal = (String)object;
UnicodeRecordStats urs = new UnicodeRecordStats(); return StringUtil.getEncodedSize(strVal);
strVal.getRecordSize(urs);
return urs.recordSize;
} }
public static void encode(LittleEndianOutput out, Object[] values) { public static void encode(LittleEndianOutput out, Object[] values) {
@ -142,10 +138,10 @@ public final class ConstantValueParser {
out.writeDouble(dVal.doubleValue()); out.writeDouble(dVal.doubleValue());
return; return;
} }
if (value instanceof UnicodeString) { if (value instanceof String) {
UnicodeString usVal = (UnicodeString) value; String val = (String) value;
out.writeByte(TYPE_STRING); out.writeByte(TYPE_STRING);
StringUtil.writeUnicodeString(out, usVal.getString()); StringUtil.writeUnicodeString(out, val);
return; return;
} }
if (value instanceof ErrorConstant) { if (value instanceof ErrorConstant) {


@ -0,0 +1,69 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.cont;
import org.apache.poi.hssf.record.ContinueRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.util.LittleEndianByteArrayOutputStream;
import org.apache.poi.util.LittleEndianOutput;
/**
* Common superclass of all records that can produce {@link ContinueRecord}s while being serialized.
*
* @author Josh Micich
*/
public abstract class ContinuableRecord extends Record {
protected ContinuableRecord() {
// no fields to initialise
}
/**
* Serializes this record's content to the supplied data output.<br/>
* The standard BIFF header (ushort sid, ushort size) has been handled by the superclass, so
* only BIFF data should be written by this method. Simple data types can be written with the
* standard {@link LittleEndianOutput} methods. Methods from {@link ContinuableRecordOutput}
* can be used to serialize strings (with {@link ContinueRecord}s being written as required).
* If necessary, implementors can explicitly start {@link ContinueRecord}s (regardless of the
* amount of remaining space).
*
* @param out a data output stream
*/
protected abstract void serialize(ContinuableRecordOutput out);
/**
* @return four less than the total length of the encoded record(s)
* (in the case when no {@link ContinueRecord} is needed, this is the
* same ushort value that gets encoded after the record sid)
*/
protected final int getDataSize() {
ContinuableRecordOutput out = ContinuableRecordOutput.createForCountingOnly();
serialize(out);
out.terminate();
return out.getTotalSize() - 4;
}
public final int serialize(int offset, byte[] data) {
LittleEndianOutput leo = new LittleEndianByteArrayOutputStream(data, offset);
ContinuableRecordOutput out = new ContinuableRecordOutput(leo, getSid());
serialize(out);
out.terminate();
return out.getTotalSize();
}
}
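For illustration, a minimal hypothetical subclass (not part of this patch; the class name, sid value and field are invented, and it assumes getSid() is the only other abstract method that Record leaves to subclasses). It shows the intended division of labour: the subclass writes only its BIFF data, while the base class supplies the sid/size headers and splits the output into ContinueRecords as needed.

import org.apache.poi.hssf.record.cont.ContinuableRecord;
import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;

public final class NoteTextRecord extends ContinuableRecord {
    public static final short sid = 0x0777; // made-up sid, for illustration only
    private final String _text;

    public NoteTextRecord(String text) {
        _text = text;
    }

    protected void serialize(ContinuableRecordOutput out) {
        // fixed-size fields use the plain LittleEndianOutput methods ...
        out.writeShort(_text.length());
        // ... while character data goes through the continue-aware helper
        out.writeStringData(_text);
    }

    public short getSid() {
        return sid;
    }
}

Because getDataSize() and serialize(int, byte[]) are both final here and both delegate to serialize(ContinuableRecordOutput), the size reported for a record and the bytes actually written are always derived from the same logic.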


@ -0,0 +1,257 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.cont;
import org.apache.poi.hssf.record.ContinueRecord;
import org.apache.poi.util.DelayableLittleEndianOutput;
import org.apache.poi.util.LittleEndianOutput;
import org.apache.poi.util.StringUtil;
/**
* An augmented {@link LittleEndianOutput} used for serialization of {@link ContinuableRecord}s.
* This class keeps track of how much remaining space is available in the current BIFF record and
* can start new {@link ContinueRecord}s as required.
*
* @author Josh Micich
*/
public final class ContinuableRecordOutput implements LittleEndianOutput {
private final LittleEndianOutput _out;
private UnknownLengthRecordOutput _ulrOutput;
private int _totalPreviousRecordsSize;
ContinuableRecordOutput(LittleEndianOutput out, int sid) {
_ulrOutput = new UnknownLengthRecordOutput(out, sid);
_out = out;
_totalPreviousRecordsSize = 0;
}
public static ContinuableRecordOutput createForCountingOnly() {
return new ContinuableRecordOutput(NOPOutput, -777); // fake sid
}
/**
* @return total number of bytes written so far (including all BIFF headers)
*/
public int getTotalSize() {
return _totalPreviousRecordsSize + _ulrOutput.getTotalSize();
}
/**
* Terminates the last record (also updates its 'ushort size' field)
*/
void terminate() {
_ulrOutput.terminate();
}
/**
* @return number of remaining bytes of space in current record
*/
public int getAvailableSpace() {
return _ulrOutput.getAvailableSpace();
}
/**
* Terminates the current record and starts a new {@link ContinueRecord} (regardless
* of how much space is still available in the current record).
*/
public void writeContinue() {
_ulrOutput.terminate();
_totalPreviousRecordsSize += _ulrOutput.getTotalSize();
_ulrOutput = new UnknownLengthRecordOutput(_out, ContinueRecord.sid);
}
public void writeContinueIfRequired(int requiredContinuousSize) {
if (_ulrOutput.getAvailableSpace() < requiredContinuousSize) {
writeContinue();
}
}
/**
* Writes the 'optionFlags' byte and encoded character data of a unicode string. This includes:
* <ul>
* <li>byte optionFlags</li>
* <li>encoded character data (in "ISO-8859-1" or "UTF-16LE" encoding)</li>
* </ul>
*
* Notes:
* <ul>
* <li>The value of the 'is16bitEncoded' flag is determined by the actual character data
* of <tt>text</tt></li>
* <li>The string options flag is never separated (by a {@link ContinueRecord}) from the
* first chunk of character data it refers to.</li>
* <li>The 'ushort length' field is assumed to have been explicitly written earlier. Hence,
* there may be an intervening {@link ContinueRecord}</li>
* </ul>
*/
public void writeStringData(String text) {
boolean is16bitEncoded = StringUtil.hasMultibyte(text);
// calculate total size of the header and first encoded char
int keepTogetherSize = 1 + 1; // byte optionFlags, at least one character byte
int optionFlags = 0x00;
if (is16bitEncoded) {
optionFlags |= 0x01;
keepTogetherSize += 1; // one extra byte for first char
}
writeContinueIfRequired(keepTogetherSize);
writeByte(optionFlags);
writeCharacterData(text, is16bitEncoded);
}
/**
* Writes a unicode string complete with header and character data. This includes:
* <ul>
* <li>ushort length</li>
* <li>byte optionFlags</li>
* <li>ushort numberOfRichTextRuns (optional)</li>
* <li>ushort extendedDataSize (optional)</li>
* <li>encoded character data (in "ISO-8859-1" or "UTF-16LE" encoding)</li>
* </ul>
*
* The following bits of the 'optionFlags' byte will be set as appropriate:
* <table border='1'>
* <tr><th>Mask</th><th>Description</th></tr>
* <tr><td>0x01</td><td>is16bitEncoded</td></tr>
* <tr><td>0x04</td><td>hasExtendedData</td></tr>
* <tr><td>0x08</td><td>isRichText</td></tr>
* </table>
* Notes:
* <ul>
* <li>The value of the 'is16bitEncoded' flag is determined by the actual character data
* of <tt>text</tt></li>
* <li>The string header fields are never separated (by a {@link ContinueRecord}) from the
* first chunk of character data (i.e. the first character is always encoded in the same
* record as the string header).</li>
* </ul>
*/
public void writeString(String text, int numberOfRichTextRuns, int extendedDataSize) {
boolean is16bitEncoded = StringUtil.hasMultibyte(text);
// calculate total size of the header and first encoded char
int keepTogetherSize = 2 + 1 + 1; // ushort len, byte optionFlags, at least one character byte
int optionFlags = 0x00;
if (is16bitEncoded) {
optionFlags |= 0x01;
keepTogetherSize += 1; // one extra byte for first char
}
if (numberOfRichTextRuns > 0) {
optionFlags |= 0x08;
keepTogetherSize += 2;
}
if (extendedDataSize > 0) {
optionFlags |= 0x04;
keepTogetherSize += 4;
}
writeContinueIfRequired(keepTogetherSize);
writeShort(text.length());
writeByte(optionFlags);
if (numberOfRichTextRuns > 0) {
writeShort(numberOfRichTextRuns);
}
if (extendedDataSize > 0) {
writeInt(extendedDataSize);
}
writeCharacterData(text, is16bitEncoded);
}
private void writeCharacterData(String text, boolean is16bitEncoded) {
int nChars = text.length();
int i=0;
if (is16bitEncoded) {
while(true) {
int nWritableChars = Math.min(nChars-i, _ulrOutput.getAvailableSpace() / 2);
for ( ; nWritableChars > 0; nWritableChars--) {
_ulrOutput.writeShort(text.charAt(i++));
}
if (i >= nChars) {
break;
}
writeContinue();
writeByte(0x01);
}
} else {
while(true) {
int nWritableChars = Math.min(nChars-i, _ulrOutput.getAvailableSpace() / 1);
for ( ; nWritableChars > 0; nWritableChars--) {
_ulrOutput.writeByte(text.charAt(i++));
}
if (i >= nChars) {
break;
}
writeContinue();
writeByte(0x00);
}
}
}
public void write(byte[] b) {
writeContinueIfRequired(b.length);
_ulrOutput.write(b);
}
public void write(byte[] b, int offset, int len) {
writeContinueIfRequired(len);
_ulrOutput.write(b, offset, len);
}
public void writeByte(int v) {
writeContinueIfRequired(1);
_ulrOutput.writeByte(v);
}
public void writeDouble(double v) {
writeContinueIfRequired(8);
_ulrOutput.writeDouble(v);
}
public void writeInt(int v) {
writeContinueIfRequired(4);
_ulrOutput.writeInt(v);
}
public void writeLong(long v) {
writeContinueIfRequired(8);
_ulrOutput.writeLong(v);
}
public void writeShort(int v) {
writeContinueIfRequired(2);
_ulrOutput.writeShort(v);
}
/**
* Allows optimised usage of {@link ContinuableRecordOutput} for sizing purposes only.
*/
private static final LittleEndianOutput NOPOutput = new DelayableLittleEndianOutput() {
public LittleEndianOutput createDelayedOutput(int size) {
return this;
}
public void write(byte[] b) {
// does nothing
}
public void write(byte[] b, int offset, int len) {
// does nothing
}
public void writeByte(int v) {
// does nothing
}
public void writeDouble(double v) {
// does nothing
}
public void writeInt(int v) {
// does nothing
}
public void writeLong(long v) {
// does nothing
}
public void writeShort(int v) {
// does nothing
}
};
}
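A small usage sketch (not from the patch; the class and method names are invented). It measures how many bytes a plain unicode string would take on disk, including the 4-byte header of every ContinueRecord that would have to be started; the rewritten SST size test further down uses createForCountingOnly() in the same way.

import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;

final class StringSizingDemo {
    // Hypothetical helper: on-disk size of a plain (no rich-text runs, no ExtRst)
    // unicode string, counting the BIFF headers of the initial record and of any
    // ContinueRecords needed once the record data limit (8224 bytes) is reached.
    static int sizeOnDisk(String text) {
        ContinuableRecordOutput out = ContinuableRecordOutput.createForCountingOnly();
        out.writeString(text, 0, 0); // numberOfRichTextRuns = 0, extendedDataSize = 0
        return out.getTotalSize();   // includes every record header written so far
    }
}

As documented above, the string header is never split from the first character, so the ushort length, option flags and the first encoded character always land in the same record.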


@ -0,0 +1,114 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.cont;
import org.apache.poi.hssf.record.RecordInputStream;
import org.apache.poi.util.DelayableLittleEndianOutput;
import org.apache.poi.util.LittleEndianByteArrayOutputStream;
import org.apache.poi.util.LittleEndianOutput;
/**
* Allows the writing of BIFF records when the 'ushort size' header field is not known in advance.
* When the client is finished writing data, it calls {@link #terminate()}, at which point this
* class updates the 'ushort size' with its final value.
*
* @author Josh Micich
*/
final class UnknownLengthRecordOutput implements LittleEndianOutput {
private static final int MAX_DATA_SIZE = RecordInputStream.MAX_RECORD_DATA_SIZE;
private final LittleEndianOutput _originalOut;
/** for writing the 'ushort size' field once its value is known */
private final LittleEndianOutput _dataSizeOutput;
private final byte[] _byteBuffer;
private LittleEndianOutput _out;
private int _size;
public UnknownLengthRecordOutput(LittleEndianOutput out, int sid) {
_originalOut = out;
out.writeShort(sid);
if (out instanceof DelayableLittleEndianOutput) {
// optimisation
DelayableLittleEndianOutput dleo = (DelayableLittleEndianOutput) out;
_dataSizeOutput = dleo.createDelayedOutput(2);
_byteBuffer = null;
_out = out;
} else {
// otherwise temporarily write all subsequent data to a buffer
_dataSizeOutput = out;
_byteBuffer = new byte[RecordInputStream.MAX_RECORD_DATA_SIZE];
_out = new LittleEndianByteArrayOutputStream(_byteBuffer, 0);
}
}
/**
* includes 4 byte header
*/
public int getTotalSize() {
return 4 + _size;
}
public int getAvailableSpace() {
if (_out == null) {
throw new IllegalStateException("Record already terminated");
}
return MAX_DATA_SIZE - _size;
}
/**
* Finishes writing the current record and updates 'ushort size' field.<br/>
* After this method is called, only {@link #getTotalSize()} may be called.
*/
public void terminate() {
if (_out == null) {
throw new IllegalStateException("Record already terminated");
}
_dataSizeOutput.writeShort(_size);
if (_byteBuffer != null) {
_originalOut.write(_byteBuffer, 0, _size);
_out = null;
return;
}
_out = null;
}
public void write(byte[] b) {
_out.write(b);
_size += b.length;
}
public void write(byte[] b, int offset, int len) {
_out.write(b, offset, len);
_size += len;
}
public void writeByte(int v) {
_out.writeByte(v);
_size += 1;
}
public void writeDouble(double v) {
_out.writeDouble(v);
_size += 8;
}
public void writeInt(int v) {
_out.writeInt(v);
_size += 4;
}
public void writeLong(long v) {
_out.writeLong(v);
_size += 8;
}
public void writeShort(int v) {
_out.writeShort(v);
_size += 2;
}
}
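A sketch of the write-then-backfill flow described above (not part of the patch; the demo class, method and sid value are invented, and it sits in org.apache.poi.hssf.record.cont because UnknownLengthRecordOutput is package-private).

package org.apache.poi.hssf.record.cont;

import org.apache.poi.util.LittleEndianByteArrayOutputStream;

final class SizeBackfillDemo {
    static int writeTinyRecord(byte[] buf) {
        // the sid is written immediately; two bytes are reserved for 'ushort size'
        UnknownLengthRecordOutput out = new UnknownLengthRecordOutput(
                new LittleEndianByteArrayOutputStream(buf, 0), 0x0777); // made-up sid
        out.writeShort(1); // 2 data bytes
        out.writeByte(0);  // 1 data byte
        out.terminate();   // backfills 'ushort size' = 3
        return out.getTotalSize(); // 4 header bytes + 3 data bytes = 7
    }
}

Since LittleEndianByteArrayOutputStream implements DelayableLittleEndianOutput (see below), the optimised path is taken and no temporary buffer is allocated.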


@ -206,8 +206,8 @@ public final class ArrayPtg extends Ptg {
if (o == null) { if (o == null) {
throw new RuntimeException("Array item cannot be null"); throw new RuntimeException("Array item cannot be null");
} }
if (o instanceof UnicodeString) { if (o instanceof String) {
return "\"" + ((UnicodeString)o).getString() + "\""; return "\"" + (String)o + "\"";
} }
if (o instanceof Double) { if (o instanceof Double) {
return ((Double)o).toString(); return ((Double)o).toString();


@ -43,7 +43,6 @@ import org.apache.poi.hssf.record.NumberRecord;
import org.apache.poi.hssf.record.ObjRecord; import org.apache.poi.hssf.record.ObjRecord;
import org.apache.poi.hssf.record.Record; import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.RecordBase; import org.apache.poi.hssf.record.RecordBase;
import org.apache.poi.hssf.record.StringRecord;
import org.apache.poi.hssf.record.SubRecord; import org.apache.poi.hssf.record.SubRecord;
import org.apache.poi.hssf.record.TextObjectRecord; import org.apache.poi.hssf.record.TextObjectRecord;
import org.apache.poi.hssf.record.UnicodeString; import org.apache.poi.hssf.record.UnicodeString;
@ -257,7 +256,7 @@ public class HSSFCell implements Cell {
} }
public int getColumnIndex() { public int getColumnIndex() {
return record.getColumn() & 0xFFFF; return record.getColumn() & 0xFFFF;
} }
/** /**
@ -336,38 +335,23 @@ public class HSSFCell implements Cell {
break; break;
case CELL_TYPE_STRING : case CELL_TYPE_STRING :
LabelSSTRecord lrec = null; LabelSSTRecord lrec;
if (cellType != this.cellType) if (cellType == this.cellType) {
{ lrec = (LabelSSTRecord) record;
} else {
lrec = new LabelSSTRecord(); lrec = new LabelSSTRecord();
lrec.setColumn(col);
lrec.setRow(row);
lrec.setXFIndex(styleIndex);
} }
else if (setValue) {
{ String str = convertCellValueToString();
lrec = ( LabelSSTRecord ) record; int sstIndex = book.getWorkbook().addSSTString(new UnicodeString(str));
} lrec.setSSTIndex(sstIndex);
lrec.setColumn(col); UnicodeString us = book.getWorkbook().getSSTString(sstIndex);
lrec.setRow(row); stringValue = new HSSFRichTextString();
lrec.setXFIndex(styleIndex); stringValue.setUnicodeString(us);
if (setValue)
{
if ((getStringCellValue() != null)
&& (!getStringCellValue().equals("")))
{
int sst = 0;
UnicodeString str = getRichStringCellValue().getUnicodeString();
//jmh if (encoding == ENCODING_COMPRESSED_UNICODE)
//jmh {
// jmh str.setCompressedUnicode();
// jmh } else if (encoding == ENCODING_UTF_16)
// jmh {
// jmh str.setUncompressedUnicode();
// jmh }
sst = book.getWorkbook().addSSTString(str);
lrec.setSSTIndex(sst);
getRichStringCellValue().setUnicodeString(book.getWorkbook().getSSTString(sst));
}
} }
record = lrec; record = lrec;
break; break;
@ -782,7 +766,9 @@ public class HSSFCell implements Cell {
case CELL_TYPE_BOOLEAN: case CELL_TYPE_BOOLEAN:
return (( BoolErrRecord ) record).getBooleanValue(); return (( BoolErrRecord ) record).getBooleanValue();
case CELL_TYPE_STRING: case CELL_TYPE_STRING:
return Boolean.valueOf(((StringRecord)record).getString()).booleanValue(); int sstIndex = ((LabelSSTRecord)record).getSSTIndex();
String text = book.getWorkbook().getSSTString(sstIndex).getString();
return Boolean.valueOf(text).booleanValue();
case CELL_TYPE_NUMERIC: case CELL_TYPE_NUMERIC:
return ((NumberRecord)record).getValue() != 0; return ((NumberRecord)record).getValue() != 0;
@ -796,6 +782,26 @@ public class HSSFCell implements Cell {
} }
throw new RuntimeException("Unexpected cell type (" + cellType + ")"); throw new RuntimeException("Unexpected cell type (" + cellType + ")");
} }
private String convertCellValueToString() {
switch (cellType) {
case CELL_TYPE_BLANK:
return "";
case CELL_TYPE_BOOLEAN:
return ((BoolErrRecord) record).getBooleanValue() ? "TRUE" : "FALSE";
case CELL_TYPE_STRING:
int sstIndex = ((LabelSSTRecord)record).getSSTIndex();
return book.getWorkbook().getSSTString(sstIndex).getString();
case CELL_TYPE_NUMERIC:
return String.valueOf(((NumberRecord)record).getValue());
case CELL_TYPE_ERROR:
return HSSFErrorConstants.getText(((BoolErrRecord) record).getErrorValue());
case CELL_TYPE_FORMULA:
// should really evaluate, but HSSFCell can't call HSSFFormulaEvaluator
return "";
}
throw new RuntimeException("Unexpected cell type (" + cellType + ")");
}
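A usage sketch of the conversion above (HSSFWorkbook, createSheet, createRow, createCell, setCellValue and setCellType are the standard HSSF API; the sheet name and demo class are arbitrary). Converting a cell to a text cell now routes the existing value through convertCellValueToString() and stores it in the shared string table, so a numeric 1.25 becomes the text "1.25".

import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;

final class CellConversionDemo {
    static void demo() {
        HSSFWorkbook wb = new HSSFWorkbook();
        HSSFCell cell = wb.createSheet("demo").createRow(0).createCell(0);
        cell.setCellValue(1.25);                     // plain numeric cell
        cell.setCellType(HSSFCell.CELL_TYPE_STRING); // converted in place
        // the cell is now a string (LabelSST) cell whose text is "1.25"
    }
}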
/** /**
* get the value of the cell as a boolean. For strings, numbers, and errors, we throw an exception. * get the value of the cell as a boolean. For strings, numbers, and errors, we throw an exception.


@ -0,0 +1,34 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.util;
/**
* Implementors of this interface allow client code to 'delay' writing to a certain section of a
* data output stream.<br/>
* A typical application is for writing BIFF records when the size is not known until well after
* the header has been written. The client code can call {@link #createDelayedOutput(int)}
* to reserve two bytes of the output for the 'ushort size' header field. The delayed output can
* be written at any stage.
*
* @author Josh Micich
*/
public interface DelayableLittleEndianOutput extends LittleEndianOutput {
/**
* Creates an output stream intended for outputting a sequence of <tt>size</tt> bytes.
*/
LittleEndianOutput createDelayedOutput(int size);
}


@ -24,7 +24,7 @@ package org.apache.poi.util;
* *
* @author Josh Micich * @author Josh Micich
*/ */
public final class LittleEndianByteArrayOutputStream implements LittleEndianOutput { public final class LittleEndianByteArrayOutputStream implements LittleEndianOutput, DelayableLittleEndianOutput {
private final byte[] _buf; private final byte[] _buf;
private final int _endIndex; private final int _endIndex;
private int _writeIndex; private int _writeIndex;
@ -89,4 +89,10 @@ public final class LittleEndianByteArrayOutputStream implements LittleEndianOutp
public int getWriteIndex() { public int getWriteIndex() {
return _writeIndex; return _writeIndex;
} }
public LittleEndianOutput createDelayedOutput(int size) {
checkPosition(size);
LittleEndianOutput result = new LittleEndianByteArrayOutputStream(_buf, _writeIndex, _writeIndex+size);
_writeIndex += size;
return result;
}
} }
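A minimal sketch of the delayed-write idea this interface enables (not from the patch; the class name and sid value are invented). The two size bytes are reserved up front and filled in after the payload has been written, which is what UnknownLengthRecordOutput relies on when its sink supports delayed output.

import org.apache.poi.util.LittleEndianByteArrayOutputStream;
import org.apache.poi.util.LittleEndianOutput;

final class DelayedSizeDemo {
    static void demo() {
        byte[] buf = new byte[16];
        LittleEndianByteArrayOutputStream out = new LittleEndianByteArrayOutputStream(buf, 0);
        out.writeShort(0x0777);                                   // record sid (made up)
        LittleEndianOutput sizeSlot = out.createDelayedOutput(2); // reserve 'ushort size'
        out.writeInt(0x12345678);                                 // 4 bytes of record data
        sizeSlot.writeShort(4);                                   // backfill the size field
    }
}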


@ -162,6 +162,15 @@ public class StringUtil {
} }
} }
/**
* @return the number of bytes that would be written by {@link #writeUnicodeString(LittleEndianOutput, String)}
*/
public static int getEncodedSize(String value) {
int result = 2 + 1;
result += value.length() * (StringUtil.hasMultibyte(value) ? 2 : 1);
return result;
}
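For example, assuming hasMultibyte() reports whether any character falls outside the single-byte (ISO-8859-1) range:

StringUtil.getEncodedSize("Apache");   // 2 + 1 + 6 * 1 = 9  (all characters single-byte)
StringUtil.getEncodedSize("\u0416");   // 2 + 1 + 1 * 2 = 5  (one multibyte character)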
/** /**
* Takes a unicode (java) string, and returns it as 8 bit data (in ISO-8859-1 * Takes a unicode (java) string, and returns it as 8 bit data (in ISO-8859-1
* codepage). * codepage).


@ -196,10 +196,14 @@ public class Picture extends SimpleShape {
Document doc = ppt.getDocumentRecord(); Document doc = ppt.getDocumentRecord();
EscherContainerRecord dggContainer = doc.getPPDrawingGroup().getDggContainer(); EscherContainerRecord dggContainer = doc.getPPDrawingGroup().getDggContainer();
EscherContainerRecord bstore = (EscherContainerRecord)Shape.getEscherChild(dggContainer, EscherContainerRecord.BSTORE_CONTAINER); EscherContainerRecord bstore = (EscherContainerRecord)Shape.getEscherChild(dggContainer, EscherContainerRecord.BSTORE_CONTAINER);
if(bstore == null) {
logger.log(POILogger.DEBUG, "EscherContainerRecord.BSTORE_CONTAINER was not found ");
return null;
}
List lst = bstore.getChildRecords(); List lst = bstore.getChildRecords();
int idx = getPictureIndex(); int idx = getPictureIndex();
if (idx == 0){ if (idx == 0){
logger.log(POILogger.DEBUG, "picture index was not found, returning ");
return null; return null;
} else { } else {
return (EscherBSERecord)lst.get(idx-1); return (EscherBSERecord)lst.get(idx-1);
@ -263,7 +267,7 @@ public class Picture extends SimpleShape {
ShapePainter.paint(this, graphics); ShapePainter.paint(this, graphics);
PictureData data = getPictureData(); PictureData data = getPictureData();
data.draw(graphics, this); if(data != null) data.draw(graphics, this);
graphics.setTransform(at); graphics.setTransform(at);
} }


@ -56,8 +56,8 @@ public class TableCell extends TextBox {
super(parent); super(parent);
setShapeType(ShapeTypes.Rectangle); setShapeType(ShapeTypes.Rectangle);
_txtrun.setRunType(TextHeaderAtom.HALF_BODY_TYPE); //_txtrun.setRunType(TextHeaderAtom.HALF_BODY_TYPE);
_txtrun.getRichTextRuns()[0].setFlag(false, 0, false); //_txtrun.getRichTextRuns()[0].setFlag(false, 0, false);
} }
protected EscherContainerRecord createSpContainer(boolean isChild){ protected EscherContainerRecord createSpContainer(boolean isChild){


@ -20,9 +20,12 @@ import junit.framework.*;
import java.io.FileOutputStream; import java.io.FileOutputStream;
import java.io.File; import java.io.File;
import java.io.IOException;
import java.awt.*; import java.awt.*;
import java.awt.image.BufferedImage;
import org.apache.poi.hslf.usermodel.SlideShow; import org.apache.poi.hslf.usermodel.SlideShow;
import org.apache.poi.hslf.usermodel.PictureData;
import org.apache.poi.hslf.HSLFSlideShow; import org.apache.poi.hslf.HSLFSlideShow;
import org.apache.poi.ddf.EscherBSERecord; import org.apache.poi.ddf.EscherBSERecord;
@ -70,4 +73,24 @@ public class TestPicture extends TestCase {
} }
/**
* Picture#getEscherBSERecord threw NullPointerException if EscherContainerRecord.BSTORE_CONTAINER
* was not found. The correct behaviour is to return null.
*/
public void test46122() throws IOException {
SlideShow ppt = new SlideShow();
Slide slide = ppt.createSlide();
Picture pict = new Picture(-1); //index to non-existing picture data
pict.setSheet(slide);
PictureData data = pict.getPictureData();
assertNull(data);
BufferedImage img = new BufferedImage(100, 100, BufferedImage.TYPE_INT_RGB);
Graphics2D graphics = img.createGraphics();
pict.draw(graphics);
assertTrue("no errors rendering Picture with null data", true);
}
} }


@ -24,6 +24,7 @@ import java.awt.geom.Rectangle2D;
import org.apache.poi.hslf.usermodel.SlideShow; import org.apache.poi.hslf.usermodel.SlideShow;
import org.apache.poi.hslf.HSLFSlideShow; import org.apache.poi.hslf.HSLFSlideShow;
import org.apache.poi.hslf.record.TextHeaderAtom;
/** /**
* Test <code>Table</code> object. * Test <code>Table</code> object.
@ -43,6 +44,10 @@ public class TestTable extends TestCase {
Table tbl = new Table(2, 5); Table tbl = new Table(2, 5);
slide.addShape(tbl); slide.addShape(tbl);
TableCell cell = tbl.getCell(0, 0);
//table cells have type=TextHeaderAtom.OTHER_TYPE, see bug #46033
assertEquals(TextHeaderAtom.OTHER_TYPE, cell.getTextRun().getRunType());
assertTrue(slide.getShapes()[0] instanceof Table); assertTrue(slide.getShapes()[0] instanceof Table);
Table tbl2 = (Table)slide.getShapes()[0]; Table tbl2 = (Table)slide.getShapes()[0];
assertEquals(tbl.getNumberOfColumns(), tbl2.getNumberOfColumns()); assertEquals(tbl.getNumberOfColumns(), tbl2.getNumberOfColumns());


@ -48,7 +48,6 @@ public final class TestRecordFactory extends TestCase {
byte[] data = { byte[] data = {
0, 6, 5, 0, -2, 28, -51, 7, -55, 64, 0, 0, 6, 1, 0, 0 0, 6, 5, 0, -2, 28, -51, 7, -55, 64, 0, 0, 6, 1, 0, 0
}; };
short size = 16;
Record[] record = RecordFactory.createRecord(TestcaseRecordInputStream.create(recType, data)); Record[] record = RecordFactory.createRecord(TestcaseRecordInputStream.create(recType, data));
assertEquals(BOFRecord.class.getName(), assertEquals(BOFRecord.class.getName(),
@ -64,7 +63,6 @@ public final class TestRecordFactory extends TestCase {
assertEquals(5, bofRecord.getType()); assertEquals(5, bofRecord.getType());
assertEquals(1536, bofRecord.getVersion()); assertEquals(1536, bofRecord.getVersion());
recType = MMSRecord.sid; recType = MMSRecord.sid;
size = 2;
data = new byte[] data = new byte[]
{ {
0, 0 0, 0
@ -93,7 +91,6 @@ public final class TestRecordFactory extends TestCase {
byte[] data = { byte[] data = {
0, 0, 0, 0, 21, 0, 0, 0, 0, 0 0, 0, 0, 0, 21, 0, 0, 0, 0, 0
}; };
short size = 10;
Record[] record = RecordFactory.createRecord(TestcaseRecordInputStream.create(recType, data)); Record[] record = RecordFactory.createRecord(TestcaseRecordInputStream.create(recType, data));
assertEquals(NumberRecord.class.getName(), assertEquals(NumberRecord.class.getName(),
@ -154,34 +151,34 @@ public final class TestRecordFactory extends TestCase {
*/ */
public void testMixedContinue() throws Exception { public void testMixedContinue() throws Exception {
/** /**
* Taken from a real test sample file 39512.xls. See Bug 39512 for details. * Adapted from a real test sample file 39512.xls (Offset 0x4854).
* See Bug 39512 for details.
*/ */
String dump = String dump =
//OBJ //OBJ
"5D, 00, 48, 00, 15, 00, 12, 00, 0C, 00, 3C, 00, 11, 00, A0, 2E, 03, 01, CC, 42, " + "5D 00 48 00 15 00 12 00 0C 00 3C 00 11 00 A0 2E 03 01 CC 42 " +
"CF, 00, 00, 00, 00, 00, 0A, 00, 0C, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, " + "CF 00 00 00 00 00 0A 00 0C 00 00 00 00 00 00 00 00 00 00 00 " +
"03, 00, 0B, 00, 06, 00, 28, 01, 03, 01, 00, 00, 12, 00, 08, 00, 00, 00, 00, 00, " + "03 00 0B 00 06 00 28 01 03 01 00 00 12 00 08 00 00 00 00 00 " +
"00, 00, 03, 00, 11, 00, 04, 00, 3D, 00, 00, 00, 00, 00, 00, 00, " + "00 00 03 00 11 00 04 00 3D 00 00 00 00 00 00 00 " +
//MSODRAWING //MSODRAWING
"EC, 00, 08, 00, 00, 00, 0D, F0, 00, 00, 00, 00, " + "EC 00 08 00 00 00 0D F0 00 00 00 00 " +
//TXO //TXO (and 2 trailing CONTINUE records)
"B6, 01, 12, 00, 22, 02, 00, 00, 00, 00, 00, 00, 00, 00, 10, 00, 10, 00, 00, 00, " + "B6 01 12 00 22 02 00 00 00 00 00 00 00 00 10 00 10 00 00 00 00 00 " +
"00, 00, 3C, 00, 21, 00, 01, 4F, 00, 70, 00, 74, 00, 69, 00, 6F, 00, 6E, 00, 20, " + "3C 00 11 00 00 4F 70 74 69 6F 6E 20 42 75 74 74 6F 6E 20 33 39 " +
"00, 42, 00, 75, 00, 74, 00, 74, 00, 6F, 00, 6E, 00, 20, 00, 33, 00, 39, 00, 3C, " + "3C 00 10 00 00 00 05 00 00 00 00 00 10 00 00 00 00 00 00 00 " +
"00, 10, 00, 00, 00, 05, 00, 00, 00, 00, 00, 10, 00, 00, 00, 00, 00, 00, 00, " + // another CONTINUE
//CONTINUE "3C 00 7E 00 0F 00 04 F0 7E 00 00 00 92 0C 0A F0 08 00 00 00 " +
"3C, 00, 7E, 00, 0F, 00, 04, F0, 7E, 00, 00, 00, 92, 0C, 0A, F0, 08, 00, 00, 00, " + "3D 04 00 00 00 0A 00 00 A3 00 0B F0 3C 00 00 00 7F 00 00 01 " +
"3D, 04, 00, 00, 00, 0A, 00, 00, A3, 00, 0B, F0, 3C, 00, 00, 00, 7F, 00, 00, 01, " + "00 01 80 00 8C 01 03 01 85 00 01 00 00 00 8B 00 02 00 00 00 " +
"00, 01, 80, 00, 8C, 01, 03, 01, 85, 00, 01, 00, 00, 00, 8B, 00, 02, 00, 00, 00, " + "BF 00 08 00 1A 00 7F 01 29 00 29 00 81 01 41 00 00 08 BF 01 " +
"BF, 00, 08, 00, 1A, 00, 7F, 01, 29, 00, 29, 00, 81, 01, 41, 00, 00, 08, BF, 01, " + "00 00 10 00 C0 01 40 00 00 08 FF 01 00 00 08 00 00 00 10 F0 " +
"00, 00, 10, 00, C0, 01, 40, 00, 00, 08, FF, 01, 00, 00, 08, 00, 00, 00, 10, F0, " + "12 00 00 00 02 00 02 00 A0 03 18 00 B5 00 04 00 30 02 1A 00 " +
"12, 00, 00, 00, 02, 00, 02, 00, A0, 03, 18, 00, B5, 00, 04, 00, 30, 02, 1A, 00, " + "00 00 00 00 11 F0 00 00 00 00 " +
"00, 00, 00, 00, 11, F0, 00, 00, 00, 00, " +
//OBJ //OBJ
"5D, 00, 48, 00, 15, 00, 12, 00, 0C, 00, 3D, 00, 11, 00, 8C, 01, 03, 01, C8, 59, CF, 00, 00, " + "5D 00 48 00 15 00 12 00 0C 00 3D 00 11 00 8C 01 03 01 C8 59 CF 00 00 " +
"00, 00, 00, 0A, 00, 0C, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 03, 00, 0B, 00, 06, 00, " + "00 00 00 0A 00 0C 00 00 00 00 00 00 00 00 00 00 00 03 00 0B 00 06 00 " +
"7C, 16, 03, 01, 00, 00, 12, 00, 08, 00, 00, 00, 00, 00, 00, 00, 03, 00, 11, 00, 04, 00, 01, " + "7C 16 03 01 00 00 12 00 08 00 00 00 00 00 00 00 03 00 11 00 04 00 01 " +
"00, 00, 00, 00, 00, 00, 00"; "00 00 00 00 00 00 00";
byte[] data = HexRead.readFromString(dump); byte[] data = HexRead.readFromString(dump);
List records = RecordFactory.createRecords(new ByteArrayInputStream(data)); List records = RecordFactory.createRecords(new ByteArrayInputStream(data));


@ -1,4 +1,3 @@
/* ==================================================================== /* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with contributor license agreements. See the NOTICE file distributed with
@ -15,133 +14,106 @@
See the License for the specific language governing permissions and See the License for the specific language governing permissions and
limitations under the License. limitations under the License.
==================================================================== */ ==================================================================== */
package org.apache.poi.hssf.record; package org.apache.poi.hssf.record;
import junit.framework.TestCase; import junit.framework.TestCase;
import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;
import org.apache.poi.util.IntMapper; import org.apache.poi.util.IntMapper;
/** /**
* Tests that records size calculates correctly. * Tests that records size calculates correctly.
* *
* @author Glen Stampoultzis (glens at apache.org) * @author Glen Stampoultzis (glens at apache.org)
*/ */
public final class TestSSTRecordSizeCalculator extends TestCase {
    private static final String SMALL_STRING = "Small string";
    private static final int COMPRESSED_PLAIN_STRING_OVERHEAD = 3;
    private static final int OPTION_FIELD_SIZE = 1;

    private final IntMapper strings = new IntMapper();

    private void confirmSize(int expectedSize) {
        ContinuableRecordOutput cro = ContinuableRecordOutput.createForCountingOnly();
        SSTSerializer ss = new SSTSerializer(strings, 0, 0);
        ss.serialize(cro);
        assertEquals(expectedSize, cro.getTotalSize());
    }

    public void testBasic() {
        strings.add(makeUnicodeString(SMALL_STRING));
        confirmSize(SSTRecord.SST_RECORD_OVERHEAD
                + COMPRESSED_PLAIN_STRING_OVERHEAD
                + SMALL_STRING.length());
    }

    public void testBigStringAcrossUnicode() {
        int bigString = SSTRecord.MAX_DATA_SPACE + 100;
        strings.add(makeUnicodeString(bigString));
        confirmSize(SSTRecord.SST_RECORD_OVERHEAD
                + COMPRESSED_PLAIN_STRING_OVERHEAD
                + SSTRecord.MAX_DATA_SPACE
                + SSTRecord.STD_RECORD_OVERHEAD
                + OPTION_FIELD_SIZE
                + 100);
    }

    public void testPerfectFit() {
        int perfectFit = SSTRecord.MAX_DATA_SPACE - COMPRESSED_PLAIN_STRING_OVERHEAD;
        strings.add(makeUnicodeString(perfectFit));
        confirmSize(SSTRecord.SST_RECORD_OVERHEAD
                + COMPRESSED_PLAIN_STRING_OVERHEAD
                + perfectFit);
    }

    public void testJustOversized() {
        int tooBig = SSTRecord.MAX_DATA_SPACE - COMPRESSED_PLAIN_STRING_OVERHEAD + 1;
        strings.add(makeUnicodeString(tooBig));
        confirmSize(SSTRecord.SST_RECORD_OVERHEAD
                + COMPRESSED_PLAIN_STRING_OVERHEAD
                + tooBig - 1
                // continue record
                + SSTRecord.STD_RECORD_OVERHEAD
                + OPTION_FIELD_SIZE
                + 1);
    }

    public void testSecondStringStartsOnNewContinuation() {
        int perfectFit = SSTRecord.MAX_DATA_SPACE - COMPRESSED_PLAIN_STRING_OVERHEAD;
        strings.add(makeUnicodeString(perfectFit));
        strings.add(makeUnicodeString(SMALL_STRING));
        confirmSize(SSTRecord.SST_RECORD_OVERHEAD
                + SSTRecord.MAX_DATA_SPACE
                // second string
                + SSTRecord.STD_RECORD_OVERHEAD
                + COMPRESSED_PLAIN_STRING_OVERHEAD
                + SMALL_STRING.length());
    }

    public void testHeaderCrossesNormalContinuePoint() {
        int almostPerfectFit = SSTRecord.MAX_DATA_SPACE - COMPRESSED_PLAIN_STRING_OVERHEAD - 2;
        strings.add(makeUnicodeString(almostPerfectFit));
        String oneCharString = new String(new char[1]);
        strings.add(makeUnicodeString(oneCharString));
        confirmSize(SSTRecord.SST_RECORD_OVERHEAD
                + COMPRESSED_PLAIN_STRING_OVERHEAD
                + almostPerfectFit
                // second string
                + SSTRecord.STD_RECORD_OVERHEAD
                + COMPRESSED_PLAIN_STRING_OVERHEAD
                + oneCharString.length());
    }

    private static UnicodeString makeUnicodeString(int size) {
        String s = new String(new char[size]);
        return makeUnicodeString(s);
    }

    private static UnicodeString makeUnicodeString(String s) {
        UnicodeString st = new UnicodeString(s);
        st.setOptionFlags((byte) 0);
        return st;
    }
}
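The rewritten SST size tests above all measure serialized size the same way: serialize into a counting-only output and read back the byte total. A minimal sketch of that idiom, using only calls that appear in the tests above (it assumes a UnicodeString s built via makeUnicodeString):

    ContinuableRecordOutput out = ContinuableRecordOutput.createForCountingOnly();
    s.serialize(out);                     // nothing is written; bytes are only counted
    int sizeOnDisk = out.getTotalSize();  // includes headers of any ContinueRecords needed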

@ -18,6 +18,12 @@
package org.apache.poi.hssf.record;

import org.apache.poi.util.HexRead;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.LittleEndianByteArrayInputStream;
import org.apache.poi.util.LittleEndianInput;

import junit.framework.AssertionFailedError;
import junit.framework.TestCase;

/**
@ -28,29 +34,66 @@ import junit.framework.TestCase;
 * @author Glen Stampoultzis (glens at apache.org)
 */
public final class TestStringRecord extends TestCase {
    private static final byte[] data = HexRead.readFromString(
            "0B 00 " + // length
            "00 " +    // option
            // string
            "46 61 68 72 7A 65 75 67 74 79 70"
    );

    public void testLoad() {
        StringRecord record = new StringRecord(TestcaseRecordInputStream.create(0x207, data));
        assertEquals("Fahrzeugtyp", record.getString());
        assertEquals(18, record.getRecordSize());
    }

    public void testStore() {
        StringRecord record = new StringRecord();
        record.setString("Fahrzeugtyp");

        byte [] recordBytes = record.serialize();
        assertEquals(recordBytes.length - 4, data.length);
        for (int i = 0; i < data.length; i++)
            assertEquals("At offset " + i, data[i], recordBytes[i+4]);
    }

    public void testContinue() {
        int MAX_BIFF_DATA = RecordInputStream.MAX_RECORD_DATA_SIZE;
        int TEXT_LEN = MAX_BIFF_DATA + 1000; // deliberately over-size
        String textChunk = "ABCDEGGHIJKLMNOP"; // 16 chars
        StringBuffer sb = new StringBuffer(16384);
        while (sb.length() < TEXT_LEN) {
            sb.append(textChunk);
        }
        sb.setLength(TEXT_LEN);

        StringRecord sr = new StringRecord();
        sr.setString(sb.toString());
        byte[] ser = sr.serialize();
        assertEquals(StringRecord.sid, LittleEndian.getUShort(ser, 0));
        if (LittleEndian.getUShort(ser, 2) > MAX_BIFF_DATA) {
            throw new AssertionFailedError(
                    "StringRecord should have been split with a continue record");
        }
        // Confirm expected size of first record, and ushort strLen.
        assertEquals(MAX_BIFF_DATA, LittleEndian.getUShort(ser, 2));
        assertEquals(TEXT_LEN, LittleEndian.getUShort(ser, 4));

        // Confirm first few bytes of ContinueRecord
        LittleEndianInput crIn = new LittleEndianByteArrayInputStream(ser, (MAX_BIFF_DATA + 4));
        int nCharsInFirstRec = MAX_BIFF_DATA - (2 + 1); // strLen, optionFlags
        int nCharsInSecondRec = TEXT_LEN - nCharsInFirstRec;
        assertEquals(ContinueRecord.sid, crIn.readUShort());
        assertEquals(1 + nCharsInSecondRec, crIn.readUShort());
        assertEquals(0, crIn.readUByte());
        assertEquals('N', crIn.readUByte());
        assertEquals('O', crIn.readUByte());

        // re-read and make sure string value is the same
        RecordInputStream in = TestcaseRecordInputStream.create(ser);
        StringRecord sr2 = new StringRecord(in);
        assertEquals(sb.toString(), sr2.getString());
    }
}
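testContinue() above pins down where an over-sized StringRecord must be split with a ContinueRecord. The bookkeeping it relies on, pulled out as a sketch (the 2-byte string length and 1-byte option flags follow from the test's own offsets):

    int maxData = RecordInputStream.MAX_RECORD_DATA_SIZE; // BIFF record data limit used by the test
    int textLen = maxData + 1000;                          // deliberately over-size
    int charsInFirstRecord = maxData - (2 + 1);            // ushort strLen + option flags byte
    int charsInSecondRecord = textLen - charsInFirstRecord;
    int continueDataSize = 1 + charsInSecondRecord;        // fresh option flags byte + remaining chars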

@ -69,10 +69,10 @@ public final class TestSupBookRecord extends TestCase {
        assertEquals( 34, record.getRecordSize() );  //sid+size+data
        assertEquals("testURL", record.getURL());

        String[] sheetNames = record.getSheetNames();
        assertEquals(2, sheetNames.length);
        assertEquals("Sheet1", sheetNames[0]);
        assertEquals("Sheet2", sheetNames[1]);
    }

    /**
@ -97,11 +97,8 @@ public final class TestSupBookRecord extends TestCase {
    }

    public void testStoreER() {
        String url = "testURL";
        String[] sheetNames = { "Sheet1", "Sheet2", };
        SupBookRecord record = SupBookRecord.createExternalReferences(url, sheetNames);

        TestcaseRecordInputStream.confirmRecordEncoding(0x01AE, dataER, record.serialize());
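SupBookRecord now deals in plain Strings, so building an external-references record no longer requires wrapping values in UnicodeString. A usage sketch based on the same call that testStoreER() makes:

    SupBookRecord record = SupBookRecord.createExternalReferences(
            "testURL", new String[] { "Sheet1", "Sheet2" });
    byte[] encoded = record.serialize();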

@ -44,9 +44,9 @@ public final class TestTextObjectBaseRecord extends TestCase {
        "00 00" +
        "00 00 " +
        "3C 00 " + // ContinueRecord.sid
        "03 00 " + // size 3
        "00 " +    // unicode compressed
        "41 42 " + // 'AB'
        "3C 00 " + // ContinueRecord.sid
        "10 00 " + // size 16
        "00 00 18 00 00 00 00 00 " +
@ -63,7 +63,7 @@ public final class TestTextObjectBaseRecord extends TestCase {
        assertEquals(true, record.isTextLocked());
        assertEquals(TextObjectRecord.TEXT_ORIENTATION_ROT_RIGHT, record.getTextOrientation());
        assertEquals(49, record.getRecordSize());
    }

    public void testStore()
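The expected bytes above shrink by two because the trailing text ContinueRecord now stores 'AB' compressed (one byte per character) when no character needs a high byte. A sketch of the two encodings implied by the hex in this hunk (the leading byte is the unicode option flag):

    // before: flag 0x01, two bytes per char -> 5 data bytes ("05 00" length field)
    byte[] uncompressed = { 0x01, 0x41, 0x00, 0x42, 0x00 };
    // after:  flag 0x00, one byte per char  -> 3 data bytes ("03 00" length field)
    byte[] compressed = { 0x00, 0x41, 0x42 };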

@ -37,16 +37,14 @@ import org.apache.poi.util.LittleEndian;
public final class TestTextObjectRecord extends TestCase {

    private static final byte[] simpleData = HexRead.readFromString(
        "B6 01 12 00 " +
        "12 02 00 00 00 00 00 00" +
        "00 00 0D 00 08 00 00 00" +
        "00 00 " +
        "3C 00 0E 00 " +
        "00 48 65 6C 6C 6F 2C 20 57 6F 72 6C 64 21 " +
        "3C 00 08 " +
        "00 0D 00 00 00 00 00 00 00"
    );
@ -92,12 +90,12 @@ public final class TestTextObjectRecord extends TestCase {
        record.setStr(str);

        byte [] ser = record.serialize();

        int formatDataLen = LittleEndian.getUShort(ser, 16);
        assertEquals("formatDataLength", 0, formatDataLen);

        assertEquals(22, ser.length); // just the TXO record

        //read again
        RecordInputStream is = TestcaseRecordInputStream.create(ser);
        record = new TextObjectRecord(is);
@ -152,38 +150,38 @@ public final class TestTextObjectRecord extends TestCase {
        byte[] cln = cloned.serialize();
        assertTrue(Arrays.equals(src, cln));
    }

    /** similar to {@link #simpleData} but with link formula at end of TXO rec*/
    private static final byte[] linkData = HexRead.readFromString(
            "B6 01 " +          // TextObjectRecord.sid
            "1E 00 " +          // size 18
            "44 02 02 00 00 00 00 00" +
            "00 00 " +
            "02 00 " +          // strLen 2
            "10 00 " +          // 16 bytes for 2 format runs
            "00 00 00 00 " +

            "05 00 " +          // formula size
            "D4 F0 8A 03 " +    // unknownInt
            "24 01 00 13 C0 " + //tRef(T2)
            "13 " +             // ??

            "3C 00 " +          // ContinueRecord.sid
            "03 00 " +          // size 3
            "00 " +             // unicode compressed
            "41 42 " +          // 'AB'

            "3C 00 " +          // ContinueRecord.sid
            "10 00 " +          // size 16
            "00 00 18 00 00 00 00 00 " +
            "02 00 00 00 00 00 00 00 "
    );

    public void testLinkFormula() {
        RecordInputStream is = new RecordInputStream(new ByteArrayInputStream(linkData));
        is.nextRecord();
        TextObjectRecord rec = new TextObjectRecord(is);

        Ptg ptg = rec.getLinkRefPtg();
        assertNotNull(ptg);
        assertEquals(RefPtg.class, ptg.getClass());
@ -193,6 +191,6 @@ public final class TestTextObjectRecord extends TestCase {
        byte [] data2 = rec.serialize();
        assertEquals(linkData.length, data2.length);
        assertTrue(Arrays.equals(linkData, data2));
    }
}

@ -15,115 +15,123 @@
   limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record;

import junit.framework.TestCase;

import org.apache.poi.hssf.record.cont.ContinuableRecordOutput;

/**
 * Tests that {@link UnicodeString} record size calculates correctly. The record size
 * is used when serializing {@link SSTRecord}s.
 *
 * @author Jason Height (jheight at apache.org)
 */
public final class TestUnicodeString extends TestCase {
    private static final int MAX_DATA_SIZE = RecordInputStream.MAX_RECORD_DATA_SIZE;

    /** a 4 character string requiring 16 bit encoding */
    private static final String STR_16_BIT = "A\u591A\u8A00\u8A9E";

    private static void confirmSize(int expectedSize, UnicodeString s) {
        confirmSize(expectedSize, s, 0);
    }

    /**
     * Note - a value of zero for <tt>amountUsedInCurrentRecord</tt> would only ever occur just
     * after a {@link ContinueRecord} had been started. In the initial {@link SSTRecord} this
     * value starts at 8 (for the first {@link UnicodeString} written). In general, it can be
     * any value between 0 and {@link #MAX_DATA_SIZE}
     */
    private static void confirmSize(int expectedSize, UnicodeString s, int amountUsedInCurrentRecord) {
        ContinuableRecordOutput out = ContinuableRecordOutput.createForCountingOnly();
        out.writeContinue();
        for (int i = amountUsedInCurrentRecord; i > 0; i--) {
            out.writeByte(0);
        }
        int size0 = out.getTotalSize();
        s.serialize(out);
        int size1 = out.getTotalSize();
        int actualSize = size1 - size0;
        assertEquals(expectedSize, actualSize);
    }
    public void testSmallStringSize() {
        //Test a basic string
        UnicodeString s = makeUnicodeString("Test");
        confirmSize(7, s);

        //Test a small string that is uncompressed
        s = makeUnicodeString(STR_16_BIT);
        s.setOptionFlags((byte)0x01);
        confirmSize(11, s);

        //Test a compressed small string that has rich text formatting
        s.setString("Test");
        s.setOptionFlags((byte)0x8);
        UnicodeString.FormatRun r = new UnicodeString.FormatRun((short)0,(short)1);
        s.addFormatRun(r);
        UnicodeString.FormatRun r2 = new UnicodeString.FormatRun((short)2,(short)2);
        s.addFormatRun(r2);
        confirmSize(17, s);

        //Test an uncompressed small string that has rich text formatting
        s.setString(STR_16_BIT);
        s.setOptionFlags((byte)0x9);
        confirmSize(21, s);

        //Test a compressed small string that has rich text and extended text
        s.setString("Test");
        s.setOptionFlags((byte)0xC);
        s.setExtendedRst(new byte[]{(byte)0x1,(byte)0x2,(byte)0x3,(byte)0x4,(byte)0x5});
        confirmSize(26, s);

        //Test an uncompressed small string that has rich text and extended text
        s.setString(STR_16_BIT);
        s.setOptionFlags((byte)0xD);
        confirmSize(30, s);
    }
    public void testPerfectStringSize() {
        //Test a basic string
        UnicodeString s = makeUnicodeString(MAX_DATA_SIZE-2-1);
        confirmSize(MAX_DATA_SIZE, s);

        //Test an uncompressed string
        //Note that we can only ever get to a maximum size of 8227 since an uncompressed
        //string is writing double bytes.
        s = makeUnicodeString((MAX_DATA_SIZE-2-1)/2, true);
        s.setOptionFlags((byte)0x1);
        confirmSize(MAX_DATA_SIZE-1, s);
    }

    public void testPerfectRichStringSize() {
        //Test a rich text string
        UnicodeString s = makeUnicodeString(MAX_DATA_SIZE-2-1-8-2);
        s.addFormatRun(new UnicodeString.FormatRun((short)1,(short)0));
        s.addFormatRun(new UnicodeString.FormatRun((short)2,(short)1));
        s.setOptionFlags((byte)0x8);
        confirmSize(MAX_DATA_SIZE, s);

        //Test an uncompressed rich text string
        //Note that we can only ever get to a maximum size of 8227 since an uncompressed
        //string is writing double bytes.
        s = makeUnicodeString((MAX_DATA_SIZE-2-1-8-2)/2, true);
        s.addFormatRun(new UnicodeString.FormatRun((short)1,(short)0));
        s.addFormatRun(new UnicodeString.FormatRun((short)2,(short)1));
        s.setOptionFlags((byte)0x9);
        confirmSize(MAX_DATA_SIZE-1, s);
    }

    public void testContinuedStringSize() {
        //Test a basic string
        UnicodeString s = makeUnicodeString(MAX_DATA_SIZE-2-1+20);
        confirmSize(MAX_DATA_SIZE+4+1+20, s);
    }

    /** Tests that a string size calculation that fits neatly in two records, the second being a continue*/
    public void testPerfectContinuedStringSize() {
        //Test a basic string
        int strSize = MAX_DATA_SIZE*2;
        //String overhead
        strSize -= 3;
        //Continue Record overhead
@ -131,25 +139,29 @@ public final class TestUnicodeString extends TestCase {
        //Continue Record additional byte overhead
        strSize -= 1;
        UnicodeString s = makeUnicodeString(strSize);
        confirmSize(MAX_DATA_SIZE*2, s);
    }

    private static UnicodeString makeUnicodeString(String s) {
        UnicodeString st = new UnicodeString(s);
        st.setOptionFlags((byte)0);
        return st;
    }

    private static UnicodeString makeUnicodeString(int numChars) {
        return makeUnicodeString(numChars, false);
    }

    /**
     * @param is16Bit if <code>true</code> the created string will have characters > 0x00FF
     * @return a string of the specified number of characters
     */
    private static UnicodeString makeUnicodeString(int numChars, boolean is16Bit) {
        StringBuffer b = new StringBuffer(numChars);
        int charBase = is16Bit ? 0x8A00 : 'A';
        for (int i=0;i<numChars;i++) {
            char ch = (char) ((i%16)+charBase);
            b.append(ch);
        }
        return makeUnicodeString(b.toString());
    }
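The new makeUnicodeString(int, boolean) helper forces 16-bit content: characters based at 0x8A00 cannot be stored one byte per character, so the same character count costs twice the payload. A small sketch in the style of testSmallStringSize() above (sizes are the 3-byte string header plus one or two bytes per character):

    UnicodeString narrow = makeUnicodeString(10);        // characters based at 'A', fit in 8 bits
    UnicodeString wide   = makeUnicodeString(10, true);  // characters based at 0x8A00, need 16 bits
    wide.setOptionFlags((byte)0x1);                      // mark as uncompressed
    confirmSize(3 + 10, narrow);                         // header + 1 byte per char
    confirmSize(3 + 20, wide);                           // header + 2 bytes per char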

@ -22,7 +22,6 @@ import java.util.Arrays;
import junit.framework.TestCase;

import org.apache.poi.hssf.record.TestcaseRecordInputStream;
import org.apache.poi.hssf.usermodel.HSSFErrorConstants;
import org.apache.poi.util.HexRead;
import org.apache.poi.util.LittleEndianByteArrayOutputStream;
@ -36,7 +35,7 @@ public final class TestConstantValueParser extends TestCase {
        Boolean.TRUE,
        null,
        new Double(1.1),
        "Sample text",
        ErrorConstant.valueOf(HSSFErrorConstants.ERROR_DIV_0),
    };
    private static final byte[] SAMPLE_ENCODING = HexRead.readFromString(

@ -66,10 +66,10 @@ public final class TestArrayPtg extends TestCase {
        assertEquals(Boolean.TRUE, values[0][0]);
        assertEquals("ABCD", values[0][1]);
        assertEquals(new Double(0), values[1][0]);
        assertEquals(Boolean.FALSE, values[1][1]);
        assertEquals("FG", values[1][2]);

        byte[] outBuf = new byte[ENCODED_CONSTANT_DATA.length];
        ptg.writeTokenValueBytes(new LittleEndianByteArrayOutputStream(outBuf, 0));

@ -28,8 +28,8 @@ import org.apache.poi.hssf.model.Sheet;
import org.apache.poi.hssf.util.HSSFColor;

/**
 * Tests various functionality having to do with {@link HSSFCell}. For instance support for
 * particular datatypes, etc.
 * @author Andrew C. Oliver (andy at superlinksoftware dot com)
 * @author Dan Sherman (dsherman at isisph.com)
 * @author Alex Jacoby (ajacoby at gmail.com)
@ -345,41 +345,82 @@ public final class TestHSSFCell extends TestCase {
        }
    }

    /**
     * Test to ensure we can only assign cell styles that belong
     * to our workbook, and not those from other workbooks.
     */
    public void testCellStyleWorkbookMatch() {
        HSSFWorkbook wbA = new HSSFWorkbook();
        HSSFWorkbook wbB = new HSSFWorkbook();

        HSSFCellStyle styA = wbA.createCellStyle();
        HSSFCellStyle styB = wbB.createCellStyle();

        styA.verifyBelongsToWorkbook(wbA);
        styB.verifyBelongsToWorkbook(wbB);
        try {
            styA.verifyBelongsToWorkbook(wbB);
            fail();
        } catch (IllegalArgumentException e) {}
        try {
            styB.verifyBelongsToWorkbook(wbA);
            fail();
        } catch (IllegalArgumentException e) {}

        HSSFCell cellA = wbA.createSheet().createRow(0).createCell(0);
        HSSFCell cellB = wbB.createSheet().createRow(0).createCell(0);

        cellA.setCellStyle(styA);
        cellB.setCellStyle(styB);

        try {
            cellA.setCellStyle(styB);
            fail();
        } catch (IllegalArgumentException e) {}
        try {
            cellB.setCellStyle(styA);
            fail();
        } catch (IllegalArgumentException e) {}
    }

    public void testChangeTypeStringToBool() {
        HSSFCell cell = new HSSFWorkbook().createSheet("Sheet1").createRow(0).createCell(0);

        cell.setCellValue(new HSSFRichTextString("TRUE"));
        assertEquals(HSSFCell.CELL_TYPE_STRING, cell.getCellType());
        try {
            cell.setCellType(HSSFCell.CELL_TYPE_BOOLEAN);
        } catch (ClassCastException e) {
            throw new AssertionFailedError(
                    "Identified bug in conversion of cell from text to boolean");
        }

        assertEquals(HSSFCell.CELL_TYPE_BOOLEAN, cell.getCellType());
        assertEquals(true, cell.getBooleanCellValue());

        cell.setCellType(HSSFCell.CELL_TYPE_STRING);
        assertEquals("TRUE", cell.getRichStringCellValue().getString());

        // 'false' text to bool and back
        cell.setCellValue(new HSSFRichTextString("FALSE"));
        cell.setCellType(HSSFCell.CELL_TYPE_BOOLEAN);
        assertEquals(HSSFCell.CELL_TYPE_BOOLEAN, cell.getCellType());
        assertEquals(false, cell.getBooleanCellValue());
        cell.setCellType(HSSFCell.CELL_TYPE_STRING);
        assertEquals("FALSE", cell.getRichStringCellValue().getString());
    }

    public void testChangeTypeBoolToString() {
        HSSFCell cell = new HSSFWorkbook().createSheet("Sheet1").createRow(0).createCell(0);

        cell.setCellValue(true);
        try {
            cell.setCellType(HSSFCell.CELL_TYPE_STRING);
        } catch (IllegalStateException e) {
            if (e.getMessage().equals("Cannot get a text value from a boolean cell")) {
                throw new AssertionFailedError(
                        "Identified bug in conversion of cell from boolean to text");
            }
            throw e;
        }
        assertEquals("TRUE", cell.getRichStringCellValue().getString());
    }
}
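The two new tests document the conversion behaviour fixed in this commit: a text cell holding "TRUE" or "FALSE" can be switched to a boolean cell and back without losing its value. A condensed sketch of the round trip, using the same API calls as the tests:

    HSSFCell cell = new HSSFWorkbook().createSheet("Sheet1").createRow(0).createCell(0);
    cell.setCellValue(new HSSFRichTextString("TRUE"));
    cell.setCellType(HSSFCell.CELL_TYPE_BOOLEAN);  // text -> boolean (threw ClassCastException before the fix)
    assertEquals(true, cell.getBooleanCellValue());
    cell.setCellType(HSSFCell.CELL_TYPE_STRING);   // boolean -> text
    assertEquals("TRUE", cell.getRichStringCellValue().getString());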