Back-patched my changes from HEAD

- Zero-length LabelRecord
- Incorrect offset for LabelRecord
- ExtSST serialization (length not calculated correctly)
- StringRecord is in the value section


git-svn-id: https://svn.apache.org/repos/asf/jakarta/poi/branches/REL_2_BRANCH@353367 13f79535-47bb-0310-9956-ffa450edef68
Jason Height 2003-09-23 00:35:16 +00:00
parent 2b241e6823
commit f120f93536
6 changed files with 54 additions and 23 deletions

ExtSSTInfoSubRecord.java

@@ -65,7 +65,6 @@ import org.apache.poi.util.LittleEndian;
/**
* Extended SST table info subrecord<P>
* contains the elements of "info" in the SST's array field<P>
* WE HAVE VERY LITTLE INFORMATION ON HOW TO IMPLEMENT THIS RECORD! (EXTSSST)<P>
* @author Andrew C. Oliver (acoliver at apache dot org)
* @version 2.0-pre
* @see org.apache.poi.hssf.record.ExtSSTRecord

ExtSSTRecord.java

@@ -61,13 +61,13 @@ import java.util.ArrayList;
/**
* Title: Extended Static String Table<P>
* Description: I really don't understand this thing... its supposed to be "a hash
* table for optimizing external copy operations" --
*<P>
* This sounds like a job for Marc "BitMaster" Johnson aka the
* "Hawaiian Master Chef".<P>
* Description: This record is used for a quick lookup into the SST record. This
* record breaks the SST table into a set of buckets. The offsets
* to these buckets within the SST record are kept as well as the
* position relative to the start of the SST record.
* REFERENCE: PG 313 Microsoft Excel 97 Developer's Kit (ISBN: 1-57231-498-2)<P>
* @author Andrew C. Oliver (acoliver at apache dot org)
* @author Jason Height (jheight at apache dot org)
* @version 2.0-pre
* @see org.apache.poi.hssf.record.ExtSSTInfoSubRecord
*/
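To make the bucket description above concrete, here is a minimal sketch of the index arithmetic involved; it assumes only the DEFAULT_BUCKET_SIZE constant introduced by this patch, and the stringIndex value and variable names are illustrative, not part of the change.

    // Illustrative only: which EXTSST info record covers a given SST string index.
    int bucketSize  = ExtSSTRecord.DEFAULT_BUCKET_SIZE;   // 8 strings per bucket
    int stringIndex = 42;                                  // hypothetical SST index
    int bucketIndex = stringIndex / bucketSize;            // info record to consult
    // That info record stores the bucket's absolute stream offset and its offset
    // relative to the start of the SST record, so a reader can seek to the
    // bucket instead of scanning the whole shared string table.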
@@ -75,8 +75,9 @@ import java.util.ArrayList;
public class ExtSSTRecord
extends Record
{
public static final int DEFAULT_BUCKET_SIZE = 8;
public final static short sid = 0xff;
private short field_1_strings_per_bucket;
private short field_1_strings_per_bucket = DEFAULT_BUCKET_SIZE;
private ArrayList field_2_sst_info;
@@ -120,12 +121,11 @@ public class ExtSSTRecord
}
}
// this probably doesn't work but we don't really care at this point
protected void fillFields(byte [] data, short size, int offset)
{
field_2_sst_info = new ArrayList();
field_1_strings_per_bucket = LittleEndian.getShort(data, 0 + offset);
for (int k = 2; k < ((data.length - offset) - size); k += 8)
for (int k = 2; k < (size-offset); k += 8)
{
byte[] tempdata = new byte[ 8 + offset ];
@@ -196,16 +196,15 @@ public class ExtSSTRecord
for (int k = 0; k < getNumInfoRecords(); k++)
{
System.arraycopy(getInfoRecordAt(k).serialize(), 0, data,
pos + offset, 8);
pos += getInfoRecordAt(k).getRecordSize();
ExtSSTInfoSubRecord rec = getInfoRecordAt(k);
pos += rec.serialize(pos + offset, data);
}
return getRecordSize();
return pos;
}
public int getRecordSize()
{
return 4 + 2 + field_2_sst_info.size() * 8;
return 6 + 8*getNumInfoRecords();
}
public short getSid()

LabelRecord.java

@@ -150,15 +150,16 @@ public class LabelRecord
field_3_xf_index = LittleEndian.getShort(data, 4 + offset);
field_4_string_len = LittleEndian.getShort(data, 6 + offset);
field_5_unicode_flag = data[ 8 + offset ];
if (isUnCompressedUnicode())
{
field_6_value = StringUtil.getFromUnicode(data, 8 + offset,
if (field_4_string_len > 0) {
if (isUnCompressedUnicode()) {
field_6_value = StringUtil.getFromUnicode(data, 9 + offset,
field_4_string_len);
}
else
{
field_6_value = StringUtil.getFromCompressedUnicode(data, 9 + offset, getStringLength());
else {
field_6_value = StringUtil.getFromCompressedUnicode(data, 9 + offset,
getStringLength());
}
} else field_6_value = null;
}
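For orientation, the corrected offsets above follow directly from the layout of the LABEL record body. A minimal sketch of the read path, using only the helper calls visible in this diff; the byte positions of the row and column fields are the standard BIFF layout, stated here for completeness, and the flag test is a simplification of isUnCompressedUnicode():

    // LABEL record body, relative to 'offset':
    //   0-1 row, 2-3 column, 4-5 XF index, 6-7 string length,
    //   8   unicode flag, 9.. string data (hence 9 + offset, not 8 + offset)
    short stringLen = LittleEndian.getShort(data, 6 + offset);
    byte  flag      = data[8 + offset];
    String value;
    if (stringLen > 0) {
        value = (flag != 0)
            ? StringUtil.getFromUnicode(data, 9 + offset, stringLen)
            : StringUtil.getFromCompressedUnicode(data, 9 + offset, stringLen);
    } else {
        value = null;   // zero-length label: no string bytes follow the flag
    }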
/* READ ONLY ACCESS... THIS IS FOR COMPATIBILITY ONLY...USE LABELSST!
@@ -237,6 +238,27 @@ public class LabelRecord
return this.sid;
}
public String toString()
{
StringBuffer buffer = new StringBuffer();
buffer.append("[LABEL]\n");
buffer.append(" .row = ")
.append(Integer.toHexString(getRow())).append("\n");
buffer.append(" .column = ")
.append(Integer.toHexString(getColumn())).append("\n");
buffer.append(" .xfindex = ")
.append(Integer.toHexString(getXFIndex())).append("\n");
buffer.append(" .string_len = ")
.append(Integer.toHexString(field_4_string_len)).append("\n");
buffer.append(" .unicode_flag = ")
.append(Integer.toHexString(field_5_unicode_flag)).append("\n");
buffer.append(" .value = ")
.append(getValue()).append("\n");
buffer.append("[/LABEL]\n");
return buffer.toString();
}
public boolean isBefore(CellValueRecordInterface i)
{
if (this.getRow() > i.getRow())

SSTRecord.java

@@ -586,7 +586,10 @@ public class SSTRecord
*/
public int calcExtSSTRecordSize()
{
return 4 + 2 + ((field_3_strings.size() / SSTSerializer.DEFAULT_BUCKET_SIZE) + 1) * 8;
int infoRecs = (field_3_strings.size() / SSTSerializer.DEFAULT_BUCKET_SIZE);
if ((field_3_strings.size() % SSTSerializer.DEFAULT_BUCKET_SIZE) != 0)
infoRecs ++;
return 4 + 2 + (infoRecs * 8);
}
}
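A worked example of the corrected calculation, with an illustrative string count: 17 strings at 8 per bucket need 3 info records (17 / 8 = 2, remainder 1), so the EXTSST record occupies 4 bytes of record header + 2 bytes for the strings-per-bucket field + 3 * 8 = 30 bytes. The same rounding now also sizes the bucket offset arrays in SSTSerializer below.

    // Ceiling division: one extra bucket whenever the string count is not an
    // exact multiple of the bucket size (17 is an illustrative value).
    int numStrings = 17;
    int bucketSize = SSTSerializer.DEFAULT_BUCKET_SIZE;   // 8
    int infoRecs   = numStrings / bucketSize;              // 2
    if ((numStrings % bucketSize) != 0)
        infoRecs++;                                        // 3
    int extSstSize = 4 + 2 + (infoRecs * 8);               // 30 bytes

With exactly 8 strings, the old expression size / 8 + 1 reported one bucket too many; the remainder check avoids that off-by-one.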

SSTSerializer.java

@@ -93,8 +93,11 @@ class SSTSerializer
this.numUniqueStrings = numUniqueStrings;
this.sstRecordHeader = new SSTRecordHeader( numStrings, numUniqueStrings );
this.bucketAbsoluteOffsets = new int[strings.size()/DEFAULT_BUCKET_SIZE+1];
this.bucketRelativeOffsets = new int[strings.size()/DEFAULT_BUCKET_SIZE+1];
int infoRecs = (strings.size() / SSTSerializer.DEFAULT_BUCKET_SIZE);
if ((strings.size() % SSTSerializer.DEFAULT_BUCKET_SIZE) != 0)
infoRecs ++;
this.bucketAbsoluteOffsets = new int[infoRecs];
this.bucketRelativeOffsets = new int[infoRecs];
}
/**

StringRecord.java

@@ -138,6 +138,11 @@ public class StringRecord
}
}
public boolean isInValueSection()
{
return true;
}
private int getStringLength()
{
return field_1_string_length;
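The new isInValueSection() override ties back to the last item in the commit message: a StringRecord (the cached result of a string formula) should be grouped with the cell value records it follows. A rough sketch of the kind of routing this flag drives; the class, method, and variable names here are illustrative, not the actual sheet-loading code:

    // Illustrative only: split a record stream into value-section records and
    // everything else, based on Record.isInValueSection().
    import java.util.Iterator;
    import java.util.List;
    import org.apache.poi.hssf.record.Record;

    public class ValueSectionSplitter {
        public static void split(List records, List valueSection, List others) {
            for (Iterator it = records.iterator(); it.hasNext(); ) {
                Record rec = (Record) it.next();
                if (rec.isInValueSection()) {
                    valueSection.add(rec);   // StringRecord now lands here
                } else {
                    others.add(rec);
                }
            }
        }
    }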