Patch 46301 - (from Patrick Cheng) - added some pivot table records: SXDI, SXVDEX, SXPI, SXIDSTM, SXVIEW, SXVD, SXVS, and others.

Improved command line parsing in BiffViewer

git-svn-id: https://svn.apache.org/repos/asf/poi/trunk@721007 13f79535-47bb-0310-9956-ffa450edef68
Josh Micich 2008-11-26 22:32:07 +00:00
parent e95e166390
commit 5e29862bea
16 changed files with 1072 additions and 80 deletions
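For orientation, here is a minimal sketch of how the reworked BiffViewer command line might be exercised once this patch is applied. The package name org.apache.poi.hssf.dev and the workbook file name are assumptions for illustration only.

import org.apache.poi.hssf.dev.BiffViewer;

public class BiffViewerCliDemo {
    public static void main(String[] args) throws Exception {
        // Dump each BIFF record's hex alongside its interpreted form, sending
        // output to sample.xls.out instead of stdout ("sample.xls" is a
        // placeholder workbook path).
        BiffViewer.main(new String[] { "--biffhex", "--out", "sample.xls" });
    }
}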

View File

@ -37,6 +37,7 @@
<!-- Don't forget to update status.xml too! -->
<release version="3.5-beta5" date="2008-??-??">
<action dev="POI-DEVELOPERS" type="add">46301 - added pivot table records: SXDI, SXVDEX, SXPI, SXIDSTM, SXVIEW, SXVD, SXVS, et al</action>
<action dev="POI-DEVELOPERS" type="fix">46280 - Fixed RowRecordsAggregate etc to properly skip PivotTable records</action>
</release>
<release version="3.5-beta4" date="2008-11-29">

View File

@ -34,6 +34,7 @@
<!-- Don't forget to update changes.xml too! -->
<changes>
<release version="3.5-beta5" date="2008-??-??">
<action dev="POI-DEVELOPERS" type="add">46301 - added pivot table records: SXDI, SXVDEX, SXPI, SXIDSTM, SXVIEW, SXVD, SXVS, et al</action>
<action dev="POI-DEVELOPERS" type="fix">46280 - Fixed RowRecordsAggregate etc to properly skip PivotTable records</action>
</release>
<release version="3.5-beta4" date="2008-11-29">

View File

@ -32,6 +32,7 @@ import java.util.List;
import org.apache.poi.hssf.record.*;
import org.apache.poi.hssf.record.chart.*;
import org.apache.poi.hssf.record.pivottable.*;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndian;
@ -59,7 +60,7 @@ public final class BiffViewer {
*/
public static Record[] createRecords(InputStream is, PrintStream ps, BiffRecordListener recListener, boolean dumpInterpretedRecords)
throws RecordFormatException {
ArrayList temp = new ArrayList();
List<Record> temp = new ArrayList<Record>();
RecordInputStream recStream = new RecordInputStream(is);
while (recStream.hasNextRecord()) {
@ -210,6 +211,7 @@ public final class BiffViewer {
case StyleRecord.sid: return new StyleRecord(in);
case SupBookRecord.sid: return new SupBookRecord(in);
case TabIdRecord.sid: return new TabIdRecord(in);
case TableStylesRecord.sid: return new TableStylesRecord(in);
case TableRecord.sid: return new TableRecord(in);
case TextObjectRecord.sid: return new TextObjectRecord(in);
case TextRecord.sid: return new TextRecord(in);
@ -225,65 +227,160 @@ public final class BiffViewer {
case WindowProtectRecord.sid: return new WindowProtectRecord(in);
case WindowTwoRecord.sid: return new WindowTwoRecord(in);
case WriteAccessRecord.sid: return new WriteAccessRecord(in);
case WriteProtectRecord.sid: return new WriteProtectRecord(in);
// chart
case CatLabRecord.sid: return new CatLabRecord(in);
case ChartEndBlockRecord.sid: return new ChartEndBlockRecord(in);
case ChartEndObjectRecord.sid: return new ChartEndObjectRecord(in);
case ChartFRTInfoRecord.sid: return new ChartFRTInfoRecord(in);
case ChartStartBlockRecord.sid: return new ChartStartBlockRecord(in);
case ChartStartObjectRecord.sid: return new ChartStartObjectRecord(in);
// pivot table
case StreamIDRecord.sid: return new StreamIDRecord(in);
case ViewSourceRecord.sid: return new ViewSourceRecord(in);
case PageItemRecord.sid: return new PageItemRecord(in);
case ViewDefinitionRecord.sid: return new ViewDefinitionRecord(in);
case ViewFieldsRecord.sid: return new ViewFieldsRecord(in);
case DataItemRecord.sid: return new DataItemRecord(in);
case ExtendedPivotTableViewFieldsRecord.sid: return new ExtendedPivotTableViewFieldsRecord(in);
}
return new UnknownRecord(in);
}
private static final class CommandArgs {
private final boolean _biffhex;
private final boolean _noint;
private final boolean _out;
private final boolean _rawhex;
private final File _file;
private CommandArgs(boolean biffhex, boolean noint, boolean out, boolean rawhex, File file) {
_biffhex = biffhex;
_noint = noint;
_out = out;
_rawhex = rawhex;
_file = file;
}
public static CommandArgs parse(String[] args) throws CommandParseException {
int nArgs = args.length;
boolean biffhex = false;
boolean noint = false;
boolean out = false;
boolean rawhex = false;
File file = null;
for (int i=0; i<nArgs; i++) {
String arg = args[i];
if (arg.startsWith("--")) {
if ("--biffhex".equals(arg)) {
biffhex = true;
} else if ("--noint".equals(arg)) {
noint = true;
} else if ("--out".equals(arg)) {
out = true;
} else if ("--rawhex".equals(arg)) {
rawhex = true;
} else {
throw new CommandParseException("Unexpected option '" + arg + "'");
}
continue;
}
file = new File(arg);
if (!file.exists()) {
throw new CommandParseException("Specified file '" + arg + "' does not exist");
}
if (i+1<nArgs) {
throw new CommandParseException("File name must be the last arg");
}
}
if (file == null) {
throw new CommandParseException("Biff viewer needs a filename");
}
return new CommandArgs(biffhex, noint, out, rawhex, file);
}
public boolean shouldDumpBiffHex() {
return _biffhex;
}
public boolean shouldDumpRecordInterpretations() {
return !_noint;
}
public boolean shouldOutputToFile() {
return _out;
}
public boolean shouldOutputRawHexOnly() {
return _rawhex;
}
public File getFile() {
return _file;
}
}
private static final class CommandParseException extends Exception {
public CommandParseException(String msg) {
super(msg);
}
}
/**
* Method main: with 1 argument, just run straight biffview against the given
* file<P>
*
* with 2 arguments where the second argument is "on" - run biffviewer
* with hex dumps of records<P>
*
* with 2 arguments where the second argument is "bfd" just run a big fat
* hex dump of the file...don't worry about biffviewing it at all
* <P>
* <b>Usage</b>:<br/>
*
* BiffViewer [--biffhex] [--noint] [--out] &lt;fileName&gt; <br/>
* BiffViewer --rawhex [--out] &lt;fileName&gt; <br/>
* <br/>
*
* <table>
* <tr><td>--biffhex</td><td>show hex dump of each BIFF record</td></tr>
* <tr><td>--noint</td><td>do not output interpretation of BIFF records</td></tr>
* <tr><td>--out</td><td>send output to &lt;fileName&gt;.out</td></tr>
* <tr><td>--rawhex</td><td>output raw hex dump of whole workbook stream</td></tr>
* </table>
* <p>
* Define the system property <code>poi.deserialize.escher</code> to turn on
* deserialization of escher records.
*
*/
public static void main(String[] args) {
System.setProperty("poi.deserialize.escher", "true");
if (args.length == 0) {
System.out.println( "Biff viewer needs a filename" );
CommandArgs cmdArgs;
try {
cmdArgs = CommandArgs.parse(args);
} catch (CommandParseException e) {
e.printStackTrace();
return;
}
System.setProperty("poi.deserialize.escher", "true");
try {
String inFileName = args[0];
File inputFile = new File(inFileName);
if(!inputFile.exists()) {
throw new RuntimeException("specified inputFile '" + inFileName + "' does not exist");
}
PrintStream ps;
if (false) { // set to true to output to file
OutputStream os = new FileOutputStream(inFileName + ".out");
if (cmdArgs.shouldOutputToFile()) {
OutputStream os = new FileOutputStream(cmdArgs.getFile().getAbsolutePath() + ".out");
ps = new PrintStream(os);
} else {
ps = System.out;
}
if (args.length > 1 && args[1].equals("bfd")) {
POIFSFileSystem fs = new POIFSFileSystem(new FileInputStream(inputFile));
InputStream stream = fs.createDocumentInputStream("Workbook");
int size = stream.available();
POIFSFileSystem fs = new POIFSFileSystem(new FileInputStream(cmdArgs.getFile()));
InputStream is = fs.createDocumentInputStream("Workbook");
if (cmdArgs.shouldOutputRawHexOnly()) {
int size = is.available();
byte[] data = new byte[size];
stream.read(data);
is.read(data);
HexDump.dump(data, 0, System.out, 0);
} else {
boolean dumpInterpretedRecords = true;
boolean dumpHex = args.length > 1 && args[1].equals("on");
POIFSFileSystem fs = new POIFSFileSystem(new FileInputStream(inputFile));
InputStream is = fs.createDocumentInputStream("Workbook");
BiffRecordListener recListener = new BiffRecordListener(dumpHex ? new OutputStreamWriter(ps) : null);
boolean dumpInterpretedRecords = cmdArgs.shouldDumpRecordInterpretations();
boolean dumpHex = cmdArgs.shouldDumpBiffHex();
boolean zeroAlignHexDump = dumpInterpretedRecords;
BiffRecordListener recListener = new BiffRecordListener(dumpHex ? new OutputStreamWriter(ps) : null, zeroAlignHexDump);
is = new BiffDumpingStream(is, recListener);
createRecords(is, ps, recListener, dumpInterpretedRecords);
}
@ -295,10 +392,12 @@ public final class BiffViewer {
private static final class BiffRecordListener implements IBiffRecordListener {
private final Writer _hexDumpWriter;
private final List _headers;
public BiffRecordListener(Writer hexDumpWriter) {
private final List<String> _headers;
private final boolean _zeroAlignEachRecord;
public BiffRecordListener(Writer hexDumpWriter, boolean zeroAlignEachRecord) {
_hexDumpWriter = hexDumpWriter;
_headers = new ArrayList();
_zeroAlignEachRecord = zeroAlignEachRecord;
_headers = new ArrayList<String>();
}
public void processRecord(int globalOffset, int recordCounter, int sid, int dataSize,
@ -310,7 +409,7 @@ public final class BiffViewer {
try {
w.write(header);
w.write(NEW_LINE_CHARS);
hexDumpAligned(w, data, 0, dataSize+4, globalOffset);
hexDumpAligned(w, data, dataSize+4, globalOffset, _zeroAlignEachRecord);
w.flush();
} catch (IOException e) {
throw new RuntimeException(e);
@ -332,11 +431,11 @@ public final class BiffViewer {
return sb.toString();
}
}
private static interface IBiffRecordListener {
void processRecord(int globalOffset, int recordCounter, int sid, int dataSize, byte[] data);
}
/**
@ -352,7 +451,7 @@ public final class BiffViewer {
private int _currentPos;
private int _currentSize;
private boolean _innerHasReachedEOF;
public BiffDumpingStream(InputStream is, IBiffRecordListener listener) {
_is = new DataInputStream(is);
_listener = listener;
@ -431,33 +530,40 @@ public final class BiffViewer {
_is.close();
}
}
private static final int DUMP_LINE_LEN = 16;
private static final char[] COLUMN_SEPARATOR = " | ".toCharArray();
/**
* Hex-dumps a portion of a byte array in typical format, also preserving dump-line alignment
* @param globalOffset (somewhat arbitrary) used to calculate the addresses printed at the
* start of each line
*/
static void hexDumpAligned(Writer w, byte[] data, int baseDataOffset, int dumpLen, int globalOffset) {
static void hexDumpAligned(Writer w, byte[] data, int dumpLen, int globalOffset,
boolean zeroAlignEachRecord) {
int baseDataOffset = 0;
// perhaps this code should be moved to HexDump
int globalStart = globalOffset + baseDataOffset;
int globalEnd = globalOffset + baseDataOffset + dumpLen;
int startDelta = globalStart % DUMP_LINE_LEN;
int endDelta = globalEnd % DUMP_LINE_LEN;
if (zeroAlignEachRecord) {
startDelta = 0;
endDelta = 0;
}
int startLineAddr = globalStart - startDelta;
int endLineAddr = globalEnd - endDelta;
int lineDataOffset = baseDataOffset - startDelta;
int lineAddr = startLineAddr;
// output (possibly incomplete) first line
if (startLineAddr == endLineAddr) {
hexDumpLine(w, data, lineAddr, lineDataOffset, startDelta, endDelta);
return;
}
hexDumpLine(w, data, lineAddr, lineDataOffset, startDelta, DUMP_LINE_LEN);
// output all full lines in the middle
while (true) {
lineAddr += DUMP_LINE_LEN;
@ -467,8 +573,8 @@ public final class BiffViewer {
}
hexDumpLine(w, data, lineAddr, lineDataOffset, 0, DUMP_LINE_LEN);
}
// output (possibly incomplete) last line
if (endDelta != 0) {
hexDumpLine(w, data, lineAddr, lineDataOffset, 0, endDelta);
@ -528,4 +634,3 @@ public final class BiffViewer {
w.write(buf);
}
}
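As a rough, standalone illustration of the alignment arithmetic used by hexDumpAligned above (the offsets and lengths below are made up for the example): a record sitting at global offset 0x1D and spanning 40 bytes is padded so that printed addresses stay on 16-byte boundaries, unless per-record zero alignment is requested.

public class HexDumpAlignmentDemo {
    private static final int DUMP_LINE_LEN = 16;

    public static void main(String[] args) {
        int globalOffset = 0x1D;   // where the record happens to start in the stream
        int dumpLen = 40;          // number of bytes (header + data) to dump
        boolean zeroAlignEachRecord = false;

        int globalStart = globalOffset;
        int globalEnd = globalOffset + dumpLen;
        int startDelta = zeroAlignEachRecord ? 0 : globalStart % DUMP_LINE_LEN;
        int endDelta = zeroAlignEachRecord ? 0 : globalEnd % DUMP_LINE_LEN;

        // First line begins at the enclosing 16-byte boundary and is only
        // partially filled; the last line is truncated after endDelta bytes.
        System.out.printf("first line address 0x%04X with %d leading blank columns%n",
                globalStart - startDelta, startDelta);
        System.out.printf("last line address 0x%04X with %d bytes on it%n",
                globalEnd - endDelta, endDelta);
    }
}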

View File

@ -66,6 +66,7 @@ import org.apache.poi.hssf.record.aggregates.ConditionalFormattingTable;
import org.apache.poi.hssf.record.aggregates.DataValidityTable;
import org.apache.poi.hssf.record.aggregates.MergedCellsTable;
import org.apache.poi.hssf.record.aggregates.PageSettingsBlock;
import org.apache.poi.hssf.record.pivottable.ViewDefinitionRecord;
/**
* Finds correct insert positions for records in workbook streams<p/>
@ -337,7 +338,7 @@ final class RecordOrderer {
*/
public static boolean isEndOfRowBlock(int sid) {
switch(sid) {
case UnknownRecord.SXVIEW_00B0:
case ViewDefinitionRecord.sid:
// should have been prefixed with DrawingRecord (0x00EC), but bug 46280 seems to allow this
case DrawingRecord.sid:
case DrawingSelectionRecord.sid:
@ -378,7 +379,6 @@ final class RecordOrderer {
case SharedFormulaRecord.sid:
case TableRecord.sid:
return true;
}
return false;
}

View File

@ -31,6 +31,7 @@ import java.util.Map;
import java.util.Set;
import org.apache.poi.hssf.record.chart.*;
import org.apache.poi.hssf.record.pivottable.*;
/**
* Title: Record Factory<P>
@ -51,7 +52,8 @@ public final class RecordFactory {
* contains the classes for all the records we want to parse.<br/>
* Note - this contains most but not *every* subclass of Record.
*/
private static final Class[] recordClasses = {
@SuppressWarnings("unchecked")
private static final Class<? extends Record>[] recordClasses = new Class[] {
ArrayRecord.class,
BackupRecord.class,
BlankRecord.class,
@ -148,6 +150,7 @@ public final class RecordFactory {
SupBookRecord.class,
TabIdRecord.class,
TableRecord.class,
TableStylesRecord.class,
TextObjectRecord.class,
TopMarginRecord.class,
UncalcedRecord.class,
@ -161,18 +164,26 @@ public final class RecordFactory {
WriteProtectRecord.class,
WSBoolRecord.class,
LinkedDataRecord.class,
// chart records
BeginRecord.class,
ChartFRTInfoRecord.class,
ChartStartBlockRecord.class,
ChartEndBlockRecord.class,
ChartStartObjectRecord.class,
ChartEndObjectRecord.class,
CatLabRecord.class,
BeginRecord.class,
EndRecord.class,
LinkedDataRecord.class,
SeriesToChartGroupRecord.class,
// pivot table records
DataItemRecord.class,
ExtendedPivotTableViewFieldsRecord.class,
PageItemRecord.class,
StreamIDRecord.class,
ViewDefinitionRecord.class,
ViewFieldsRecord.class,
ViewSourceRecord.class,
};
/**
@ -291,7 +302,7 @@ public final class RecordFactory {
_allKnownRecordSIDs = results;
}
return (short[]) _allKnownRecordSIDs.clone();
return _allKnownRecordSIDs.clone();
}
/**
@ -299,13 +310,13 @@ public final class RecordFactory {
* @return map of SIDs to short,short,byte[] constructors for Record classes
* most of org.apache.poi.hssf.record.*
*/
private static Map recordsToMap(Class [] records) {
Map result = new HashMap();
Set uniqueRecClasses = new HashSet(records.length * 3 / 2);
private static Map<Short, Constructor<? extends Record>> recordsToMap(Class<? extends Record> [] records) {
Map<Short, Constructor<? extends Record>> result = new HashMap<Short, Constructor<? extends Record>>();
Set<Class<?>> uniqueRecClasses = new HashSet<Class<?>>(records.length * 3 / 2);
for (int i = 0; i < records.length; i++) {
Class recClass = records[ i ];
Class<? extends Record> recClass = records[ i ];
if(!Record.class.isAssignableFrom(recClass)) {
throw new RuntimeException("Invalid record sub-class (" + recClass.getName() + ")");
}
@ -317,7 +328,7 @@ public final class RecordFactory {
}
short sid;
Constructor constructor;
Constructor<? extends Record> constructor;
try {
sid = recClass.getField("sid").getShort(null);
constructor = recClass.getConstructor(CONSTRUCTOR_ARGS);
@ -327,7 +338,7 @@ public final class RecordFactory {
}
Short key = new Short(sid);
if (result.containsKey(key)) {
Class prev = (Class)result.get(key);
Class prev = result.get(key).getDeclaringClass();
throw new RuntimeException("duplicate record sid 0x" + Integer.toHexString(sid).toUpperCase()
+ " for classes (" + recClass.getName() + ") and (" + prev.getName() + ")");
}
@ -347,7 +358,7 @@ public final class RecordFactory {
*/
public static List createRecords(InputStream in) throws RecordFormatException {
List records = new ArrayList(NUM_RECORDS);
List<Record> records = new ArrayList<Record>(NUM_RECORDS);
RecordInputStream recStream = new RecordInputStream(in);
DrawingRecord lastDrawingRecord = new DrawingRecord( );
@ -401,7 +412,7 @@ public final class RecordFactory {
} else if (lastRecord instanceof EOFRecord) {
// This is really odd, but excel still sometimes
// outputs a file like this all the same
records.add(record);
} else {
throw new RecordFormatException("Unhandled Continue Record");
}
@ -416,7 +427,7 @@ public final class RecordFactory {
return records;
}
private static void addAll(List destList, Record[] srcRecs) {
private static void addAll(List<Record> destList, Record[] srcRecs) {
for (int i = 0; i < srcRecs.length; i++) {
destList.add(srcRecs[i]);
}
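To see the net effect of the new registrations, a minimal sketch that lists the record classes RecordFactory now produces for a workbook containing a pivot table (the file name is a placeholder):

import java.io.FileInputStream;
import java.io.InputStream;
import java.util.List;

import org.apache.poi.hssf.record.RecordFactory;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;

public class PivotRecordListingDemo {
    public static void main(String[] args) throws Exception {
        POIFSFileSystem fs = new POIFSFileSystem(new FileInputStream("pivot.xls"));
        InputStream is = fs.createDocumentInputStream("Workbook");
        List records = RecordFactory.createRecords(is);
        for (Object rec : records) {
            // Pivot table records now come back as concrete classes such as
            // ViewDefinitionRecord (SXVIEW) rather than UnknownRecord.
            System.out.println(rec.getClass().getSimpleName());
        }
    }
}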

View File

@ -0,0 +1,94 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndianOutput;
import org.apache.poi.util.StringUtil;
/**
* TABLESTYLES (0x088E)<br/>
*
* @author Patrick Cheng
*/
public final class TableStylesRecord extends StandardRecord {
public static final short sid = 0x088E;
private int rt;
private int grbitFrt;
private byte[] unused = new byte[8];
private int cts;
private String rgchDefListStyle;
private String rgchDefPivotStyle;
public TableStylesRecord(RecordInputStream in) {
rt = in.readUShort();
grbitFrt = in.readUShort();
in.readFully(unused);
cts = in.readInt();
int cchDefListStyle = in.readUShort();
int cchDefPivotStyle = in.readUShort();
rgchDefListStyle = in.readUnicodeLEString(cchDefListStyle);
rgchDefPivotStyle = in.readUnicodeLEString(cchDefPivotStyle);
}
@Override
protected void serialize(LittleEndianOutput out) {
out.writeShort(rt);
out.writeShort(grbitFrt);
out.write(unused);
out.writeInt(cts);
out.writeShort(rgchDefListStyle.length());
out.writeShort(rgchDefPivotStyle.length());
StringUtil.putUnicodeLE(rgchDefListStyle, out);
StringUtil.putUnicodeLE(rgchDefPivotStyle, out);
}
@Override
protected int getDataSize() {
return 2 + 2 + 8 + 4 + 2 + 2
+ (2*rgchDefListStyle.length()) + (2*rgchDefPivotStyle.length());
}
@Override
public short getSid() {
return sid;
}
@Override
public String toString() {
StringBuffer buffer = new StringBuffer();
buffer.append("[TABLESTYLES]\n");
buffer.append(" .rt =").append(HexDump.shortToHex(rt)).append('\n');
buffer.append(" .grbitFrt=").append(HexDump.shortToHex(grbitFrt)).append('\n');
buffer.append(" .unused =").append(HexDump.toHex(unused)).append('\n');
buffer.append(" .cts=").append(HexDump.intToHex(cts)).append('\n');
buffer.append(" .rgchDefListStyle=").append(rgchDefListStyle).append('\n');
buffer.append(" .rgchDefPivotStyle=").append(rgchDefPivotStyle).append('\n');
buffer.append("[/TABLESTYLES]\n");
return buffer.toString();
}
}

View File

@ -39,7 +39,6 @@ public final class UnknownRecord extends StandardRecord {
public static final int SHEETPR_0081 = 0x0081;
public static final int STANDARDWIDTH_0099 = 0x0099;
public static final int SCL_00A0 = 0x00A0;
public static final int SXVIEW_00B0 = 0x00B0;
public static final int BITMAP_00E9 = 0x00E9;
public static final int PHONETICPR_00EF = 0x00EF;
public static final int LABELRANGES_015F = 0x015F;
@ -122,21 +121,19 @@ public final class UnknownRecord extends StandardRecord {
// this method any time a new Record subclass is created.
switch (sid) {
case PLS_004D: return "PLS";
case 0x0050: return "DCON";
case 0x0050: return "DCON"; // Data Consolidation Information
case 0x007F: return "IMDATA";
case SHEETPR_0081: return "SHEETPR";
case 0x0090: return "SORT";
case 0x0094: return "LHRECORD";
case STANDARDWIDTH_0099: return "STANDARDWIDTH";
case 0x009D: return "AUTOFILTERINFO";
case SCL_00A0: return "SCL";
case 0x00AE: return "SCENMAN";
case SXVIEW_00B0: return "SXVIEW"; // (pivot table) View Definition
case 0x00B1: return "SXVD"; // (pivot table) View Fields
case 0x0090: return "SORT"; // Sorting Options
case 0x0094: return "LHRECORD"; // .WK? File Conversion Information
case STANDARDWIDTH_0099: return "STANDARDWIDTH"; //Standard Column Width
case 0x009D: return "AUTOFILTERINFO"; // Drop-Down Arrow Count
case SCL_00A0: return "SCL"; // Window Zoom Magnification
case 0x00AE: return "SCENMAN"; // Scenario Output Data
case 0x00B2: return "SXVI"; // (pivot table) View Item
case 0x00B4: return "SXIVD"; // (pivot table) Row/Column Field IDs
case 0x00B5: return "SXLI"; // (pivot table) Line Item Array
case 0x00C5: return "SXDI"; // (pivot table) Data Item
case 0x00D3: return "OBPROJ";
case 0x00DC: return "PARAMQRY";
@ -144,7 +141,6 @@ public final class UnknownRecord extends StandardRecord {
case BITMAP_00E9: return "BITMAP";
case PHONETICPR_00EF: return "PHONETICPR";
case 0x00F1: return "SXEX"; // PivotTable View Extended Information
case 0x0100: return "SXVDEX"; // Extended PivotTable View Fields
case LABELRANGES_015F: return "LABELRANGES";
case 0x01BA: return "CODENAME";
@ -178,7 +174,6 @@ public final class UnknownRecord extends StandardRecord {
case 0x088B: return "PLV";
case 0x088C: return "COMPAT12";
case 0x088D: return "DXF";
case 0x088E: return "TABLESTYLES";
case 0x0892: return "STYLEEXT";
case 0x0896: return "THEME";
case 0x0897: return "GUIDTYPELIB";

View File

@ -0,0 +1,72 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.chart;
import org.apache.poi.hssf.record.RecordInputStream;
import org.apache.poi.hssf.record.StandardRecord;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndianOutput;
/**
* DATALABEXT - Chart Data Label Extension (0x086A) <br/>
*
* @author Patrick Cheng
*/
public final class DataLabelExtensionRecord extends StandardRecord {
public static final short sid = 0x086A;
private int rt;
private int grbitFrt;
private byte[] unused = new byte[8];
public DataLabelExtensionRecord(RecordInputStream in) {
rt = in.readShort();
grbitFrt = in.readShort();
in.readFully(unused);
}
@Override
protected int getDataSize() {
return 2 + 2 + 8;
}
@Override
public short getSid() {
return sid;
}
@Override
protected void serialize(LittleEndianOutput out) {
out.writeShort(rt);
out.writeShort(grbitFrt);
out.write(unused);
}
@Override
public String toString() {
StringBuffer buffer = new StringBuffer();
buffer.append("[DATALABEXT]\n");
buffer.append(" .rt =").append(HexDump.shortToHex(rt)).append('\n');
buffer.append(" .grbitFrt=").append(HexDump.shortToHex(grbitFrt)).append('\n');
buffer.append(" .unused =").append(HexDump.toHex(unused)).append('\n');
buffer.append("[/DATALABEXT]\n");
return buffer.toString();
}
}

View File

@ -0,0 +1,90 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.pivottable;
import org.apache.poi.hssf.record.RecordInputStream;
import org.apache.poi.hssf.record.StandardRecord;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndianOutput;
import org.apache.poi.util.StringUtil;
/**
* SXDI - Data Item (0x00C5)<br/>
*
* @author Patrick Cheng
*/
public final class DataItemRecord extends StandardRecord {
public static final short sid = 0x00C5;
private int isxvdData;
private int iiftab;
private int df;
private int isxvd;
private int isxvi;
private int ifmt;
private String name;
public DataItemRecord(RecordInputStream in) {
isxvdData = in.readUShort();
iiftab = in.readUShort();
df = in.readUShort();
isxvd = in.readUShort();
isxvi = in.readUShort();
ifmt = in.readUShort();
name = in.readString();
}
@Override
protected void serialize(LittleEndianOutput out) {
out.writeShort(isxvdData);
out.writeShort(iiftab);
out.writeShort(df);
out.writeShort(isxvd);
out.writeShort(isxvi);
out.writeShort(ifmt);
StringUtil.writeUnicodeString(out, name);
}
@Override
protected int getDataSize() {
return 2 + 2 + 2 + 2 + 2 + 2 + StringUtil.getEncodedSize(name);
}
@Override
public short getSid() {
return sid;
}
@Override
public String toString() {
StringBuffer buffer = new StringBuffer();
buffer.append("[SXDI]\n");
buffer.append(" .isxvdData = ").append(HexDump.shortToHex(isxvdData)).append("\n");
buffer.append(" .iiftab = ").append(HexDump.shortToHex(iiftab)).append("\n");
buffer.append(" .df = ").append(HexDump.shortToHex(df)).append("\n");
buffer.append(" .isxvd = ").append(HexDump.shortToHex(isxvd)).append("\n");
buffer.append(" .isxvi = ").append(HexDump.shortToHex(isxvi)).append("\n");
buffer.append(" .ifmt = ").append(HexDump.shortToHex(ifmt)).append("\n");
buffer.append("[/SXDI]\n");
return buffer.toString();
}
}

View File

@ -0,0 +1,111 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.pivottable;
import org.apache.poi.hssf.record.RecordInputStream;
import org.apache.poi.hssf.record.StandardRecord;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndianOutput;
import org.apache.poi.util.StringUtil;
/**
* SXVDEX - Extended PivotTable View Fields (0x0100)<br/>
*
* @author Patrick Cheng
*/
public final class ExtendedPivotTableViewFieldsRecord extends StandardRecord {
public static final short sid = 0x0100;
/** the value of the <tt>cchSubName</tt> field when the subName is not present */
private static final int STRING_NOT_PRESENT_LEN = -1;
private int grbit1;
private int grbit2;
private int citmShow;
private int isxdiSort;
private int isxdiShow;
private int reserved1;
private int reserved2;
private String subName;
public ExtendedPivotTableViewFieldsRecord(RecordInputStream in) {
grbit1 = in.readInt();
grbit2 = in.readUByte();
citmShow = in.readUByte();
isxdiSort = in.readUShort();
isxdiShow = in.readUShort();
int cchSubName = in.readUShort();
reserved1 = in.readInt();
reserved2 = in.readInt();
if (cchSubName != STRING_NOT_PRESENT_LEN) {
subName = in.readUnicodeLEString(cchSubName);
}
}
@Override
protected void serialize(LittleEndianOutput out) {
out.writeInt(grbit1);
out.writeByte(grbit2);
out.writeByte(citmShow);
out.writeShort(isxdiSort);
out.writeShort(isxdiShow);
if (subName == null) {
out.writeShort(STRING_NOT_PRESENT_LEN);
} else {
out.writeShort(subName.length());
}
out.writeInt(reserved1);
out.writeInt(reserved2);
if (subName != null) {
StringUtil.putUnicodeLE(subName, out);
}
}
@Override
protected int getDataSize() {
return 4 + 1 + 1 + 2 + 2 + 2 + 4 + 4 +
(subName == null ? 0 : (2*subName.length())); // in unicode
}
@Override
public short getSid() {
return sid;
}
@Override
public String toString() {
StringBuffer buffer = new StringBuffer();
buffer.append("[SXVDEX]\n");
buffer.append(" .grbit1 =").append(HexDump.intToHex(grbit1)).append("\n");
buffer.append(" .grbit2 =").append(HexDump.byteToHex(grbit2)).append("\n");
buffer.append(" .citmShow =").append(HexDump.byteToHex(citmShow)).append("\n");
buffer.append(" .isxdiSort =").append(HexDump.shortToHex(isxdiSort)).append("\n");
buffer.append(" .isxdiShow =").append(HexDump.shortToHex(isxdiShow)).append("\n");
buffer.append(" .subName =").append(subName).append("\n");
buffer.append("[/SXVDEX]\n");
return buffer.toString();
}
}

View File

@ -0,0 +1,72 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.pivottable;
import org.apache.poi.hssf.record.RecordInputStream;
import org.apache.poi.hssf.record.StandardRecord;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndianOutput;
/**
* SXPI - Page Item (0x00B6)<br/>
*
* @author Patrick Cheng
*/
public final class PageItemRecord extends StandardRecord {
public static final short sid = 0x00B6;
private int isxvi;
private int isxvd;
private int idObj;
public PageItemRecord(RecordInputStream in) {
isxvi = in.readShort();
isxvd = in.readShort();
idObj = in.readShort();
}
@Override
protected void serialize(LittleEndianOutput out) {
out.writeShort(isxvi);
out.writeShort(isxvd);
out.writeShort(idObj);
}
@Override
protected int getDataSize() {
return 2 + 2 + 2;
}
@Override
public short getSid() {
return sid;
}
@Override
public String toString() {
StringBuffer buffer = new StringBuffer();
buffer.append("[SXPI]\n");
buffer.append(" .isxvi =").append(HexDump.shortToHex(isxvi)).append('\n');
buffer.append(" .isxvd =").append(HexDump.shortToHex(isxvd)).append('\n');
buffer.append(" .idObj =").append(HexDump.shortToHex(idObj)).append('\n');
buffer.append("[/SXPI]\n");
return buffer.toString();
}
}

View File

@ -0,0 +1,64 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.pivottable;
import org.apache.poi.hssf.record.RecordInputStream;
import org.apache.poi.hssf.record.StandardRecord;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndianOutput;
/**
* SXIDSTM - Stream ID (0x00D5)<br/>
*
* @author Patrick Cheng
*/
public final class StreamIDRecord extends StandardRecord {
public static final short sid = 0x00D5;
private int idstm;
public StreamIDRecord(RecordInputStream in) {
idstm = in.readShort();
}
@Override
protected void serialize(LittleEndianOutput out) {
out.writeShort(idstm);
}
@Override
protected int getDataSize() {
return 2;
}
@Override
public short getSid() {
return sid;
}
@Override
public String toString() {
StringBuffer buffer = new StringBuffer();
buffer.append("[SXIDSTM]\n");
buffer.append(" .idstm =").append(HexDump.shortToHex(idstm)).append('\n');
buffer.append("[/SXIDSTM]\n");
return buffer.toString();
}
}

View File

@ -0,0 +1,162 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.pivottable;
import org.apache.poi.hssf.record.RecordInputStream;
import org.apache.poi.hssf.record.StandardRecord;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndianOutput;
import org.apache.poi.util.StringUtil;
/**
* SXVIEW - View Definition (0x00B0)<br/>
*
* @author Patrick Cheng
*/
public final class ViewDefinitionRecord extends StandardRecord {
public static final short sid = 0x00B0;
private int rwFirst;
private int rwLast;
private int colFirst;
private int colLast;
private int rwFirstHead;
private int rwFirstData;
private int colFirstData;
private int iCache;
private int reserved;
private int sxaxis4Data;
private int ipos4Data;
private int cDim;
private int cDimRw;
private int cDimCol;
private int cDimPg;
private int cDimData;
private int cRw;
private int cCol;
private int grbit;
private int itblAutoFmt;
private String dataField;
private String name;
public ViewDefinitionRecord(RecordInputStream in) {
rwFirst = in.readUShort();
rwLast = in.readUShort();
colFirst = in.readUShort();
colLast = in.readUShort();
rwFirstHead = in.readUShort();
rwFirstData = in.readUShort();
colFirstData = in.readUShort();
iCache = in.readUShort();
reserved = in.readUShort();
sxaxis4Data = in.readUShort();
ipos4Data = in.readUShort();
cDim = in.readUShort();
cDimRw = in.readUShort();
cDimCol = in.readUShort();
cDimPg = in.readUShort();
cDimData = in.readUShort();
cRw = in.readUShort();
cCol = in.readUShort();
grbit = in.readUShort();
itblAutoFmt = in.readUShort();
int cchName = in.readUShort();
int cchData = in.readUShort();
name = StringUtil.readUnicodeString(in, cchName);
dataField = StringUtil.readUnicodeString(in, cchData);
}
@Override
protected void serialize(LittleEndianOutput out) {
out.writeShort(rwFirst);
out.writeShort(rwLast);
out.writeShort(colFirst);
out.writeShort(colLast);
out.writeShort(rwFirstHead);
out.writeShort(rwFirstData);
out.writeShort(colFirstData);
out.writeShort(iCache);
out.writeShort(reserved);
out.writeShort(sxaxis4Data);
out.writeShort(ipos4Data);
out.writeShort(cDim);
out.writeShort(cDimRw);
out.writeShort(cDimCol);
out.writeShort(cDimPg);
out.writeShort(cDimData);
out.writeShort(cRw);
out.writeShort(cCol);
out.writeShort(grbit);
out.writeShort(itblAutoFmt);
out.writeShort(name.length());
out.writeShort(dataField.length());
StringUtil.writeUnicodeStringFlagAndData(out, name);
StringUtil.writeUnicodeStringFlagAndData(out, dataField);
}
@Override
protected int getDataSize() {
return 40 + // 20 short fields (rwFirst ... itblAutoFmt)
StringUtil.getEncodedSize(name) + StringUtil.getEncodedSize(dataField);
}
@Override
public short getSid() {
return sid;
}
@Override
public String toString() {
StringBuffer buffer = new StringBuffer();
buffer.append("[SXVIEW]\n");
buffer.append(" .rwFirst =").append(HexDump.shortToHex(rwFirst)).append('\n');
buffer.append(" .rwLast =").append(HexDump.shortToHex(rwLast)).append('\n');
buffer.append(" .colFirst =").append(HexDump.shortToHex(colFirst)).append('\n');
buffer.append(" .colLast =").append(HexDump.shortToHex(colLast)).append('\n');
buffer.append(" .rwFirstHead =").append(HexDump.shortToHex(rwFirstHead)).append('\n');
buffer.append(" .rwFirstData =").append(HexDump.shortToHex(rwFirstData)).append('\n');
buffer.append(" .colFirstData =").append(HexDump.shortToHex(colFirstData)).append('\n');
buffer.append(" .iCache =").append(HexDump.shortToHex(iCache)).append('\n');
buffer.append(" .reserved =").append(HexDump.shortToHex(reserved)).append('\n');
buffer.append(" .sxaxis4Data =").append(HexDump.shortToHex(sxaxis4Data)).append('\n');
buffer.append(" .ipos4Data =").append(HexDump.shortToHex(ipos4Data)).append('\n');
buffer.append(" .cDim =").append(HexDump.shortToHex(cDim)).append('\n');
buffer.append(" .cDimRw =").append(HexDump.shortToHex(cDimRw)).append('\n');
buffer.append(" .cDimCol =").append(HexDump.shortToHex(cDimCol)).append('\n');
buffer.append(" .cDimPg =").append(HexDump.shortToHex(cDimPg)).append('\n');
buffer.append(" .cDimData =").append(HexDump.shortToHex(cDimData)).append('\n');
buffer.append(" .cRw =").append(HexDump.shortToHex(cRw)).append('\n');
buffer.append(" .cCol =").append(HexDump.shortToHex(cCol)).append('\n');
buffer.append(" .grbit =").append(HexDump.shortToHex(grbit)).append('\n');
buffer.append(" .itblAutoFmt =").append(HexDump.shortToHex(itblAutoFmt)).append('\n');
buffer.append(" .name =").append(name).append('\n');
buffer.append(" .dataField =").append(dataField).append('\n');
buffer.append("[/SXVIEW]\n");
return buffer.toString();
}
}

View File

@ -0,0 +1,110 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.pivottable;
import org.apache.poi.hssf.record.RecordInputStream;
import org.apache.poi.hssf.record.StandardRecord;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndianOutput;
import org.apache.poi.util.StringUtil;
/**
* SXVD - View Fields (0x00B1)<br/>
*
* @author Patrick Cheng
*/
public final class ViewFieldsRecord extends StandardRecord {
public static final short sid = 0x00B1;
/** the value of the <tt>cchName</tt> field when the name is not present */
private static final int STRING_NOT_PRESENT_LEN = -1;
private int sxaxis;
private int cSub;
private int grbitSub;
private int cItm;
private String name = null;
/**
* values for the {@link ViewFieldsRecord#sxaxis} field
*/
private static final class Axis {
public static final int NO_AXIS = 0;
public static final int ROW = 1;
public static final int COLUMN = 2;
public static final int PAGE = 4;
public static final int DATA = 8;
}
public ViewFieldsRecord(RecordInputStream in) {
sxaxis = in.readShort();
cSub = in.readShort();
grbitSub = in.readShort();
cItm = in.readShort();
int cchName = in.readShort();
if (cchName != STRING_NOT_PRESENT_LEN) {
name = in.readCompressedUnicode(cchName);
}
}
@Override
protected void serialize(LittleEndianOutput out) {
out.writeShort(sxaxis);
out.writeShort(cSub);
out.writeShort(grbitSub);
out.writeShort(cItm);
if (name != null) {
StringUtil.writeUnicodeString(out, name);
} else {
out.writeShort(STRING_NOT_PRESENT_LEN);
}
}
@Override
protected int getDataSize() {
int cchName = 0;
if (name != null) {
cchName = name.length();
}
return 2 + 2 + 2 + 2 + 2 + cchName;
}
@Override
public short getSid() {
return sid;
}
@Override
public String toString() {
StringBuffer buffer = new StringBuffer();
buffer.append("[SXVD]\n");
buffer.append(" .sxaxis = ").append(HexDump.shortToHex(sxaxis)).append('\n');
buffer.append(" .cSub = ").append(HexDump.shortToHex(cSub)).append('\n');
buffer.append(" .grbitSub = ").append(HexDump.shortToHex(grbitSub)).append('\n');
buffer.append(" .cItm = ").append(HexDump.shortToHex(cItm)).append('\n');
buffer.append(" .name = ").append(name).append('\n');
buffer.append("[/SXVD]\n");
return buffer.toString();
}
}

View File

@ -0,0 +1,64 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hssf.record.pivottable;
import org.apache.poi.hssf.record.RecordInputStream;
import org.apache.poi.hssf.record.StandardRecord;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndianOutput;
/**
* SXVS - View Source (0x00E3)<br/>
*
* @author Patrick Cheng
*/
public final class ViewSourceRecord extends StandardRecord {
public static final short sid = 0x00E3;
private int vs;
public ViewSourceRecord(RecordInputStream in) {
vs = in.readShort();
}
@Override
protected void serialize(LittleEndianOutput out) {
out.writeShort(vs);
}
@Override
protected int getDataSize() {
return 2;
}
@Override
public short getSid() {
return sid;
}
@Override
public String toString() {
StringBuffer buffer = new StringBuffer();
buffer.append("[SXVS]\n");
buffer.append(" .vs =").append(HexDump.shortToHex(vs)).append('\n');
buffer.append("[/SXVS]\n");
return buffer.toString();
}
}

View File

@ -140,6 +140,25 @@ public class StringUtil {
}
return readUnicodeLE(in, nChars);
}
/**
* InputStream <tt>in</tt> is expected to contain:
* <ol>
* <li>byte is16BitFlag</li>
* <li>byte[]/char[] characterData</li>
* </ol>
* For this encoding, the is16BitFlag is always present even if nChars==0.
* <br/>
* This method should be used when the nChars field is <em>not</em> stored
* as a ushort immediately before the is16BitFlag. Otherwise, {@link
* #readUnicodeString(LittleEndianInput)} can be used.
*/
public static String readUnicodeString(LittleEndianInput in, int nChars) {
byte is16Bit = in.readByte();
if ((is16Bit & 0x01) == 0) {
return readCompressedUnicode(in, nChars);
}
return readUnicodeLE(in, nChars);
}
/**
* OutputStream <tt>out</tt> will get:
* <ol>
@ -161,7 +180,28 @@ public class StringUtil {
putCompressedUnicode(value, out);
}
}
/**
* OutputStream <tt>out</tt> will get:
* <ol>
* <li>byte is16BitFlag</li>
* <li>byte[]/char[] characterData</li>
* </ol>
* For this encoding, the is16BitFlag is always present even if nChars==0.
* <br/>
* This method should be used when the nChars field is <em>not</em> stored
* as a ushort immediately before the is16BitFlag. Otherwise, {@link
* #writeUnicodeString(LittleEndianOutput, String)} can be used.
*/
public static void writeUnicodeStringFlagAndData(LittleEndianOutput out, String value) {
boolean is16Bit = hasMultibyte(value);
out.writeByte(is16Bit ? 0x01 : 0x00);
if (is16Bit) {
putUnicodeLE(value, out);
} else {
putCompressedUnicode(value, out);
}
}
/**
* @return the number of bytes that would be written by {@link #writeUnicodeString(LittleEndianOutput, String)}
*/
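The two methods added above mirror readUnicodeString(LittleEndianInput) and writeUnicodeString(LittleEndianOutput, String), except that no leading ushort character count is written. A standalone sketch of the resulting byte layout, using plain java.io rather than POI's LittleEndianOutput plumbing (the sample strings are arbitrary):

import java.io.ByteArrayOutputStream;

public class FlagAndDataEncodingDemo {

    // Mirrors the "is16BitFlag + character data" layout described in the
    // javadoc of writeUnicodeStringFlagAndData (illustrative re-implementation).
    static byte[] encode(String value) {
        boolean is16Bit = false;
        for (int i = 0; i < value.length(); i++) {
            if (value.charAt(i) > 0xFF) {  // same idea as StringUtil.hasMultibyte()
                is16Bit = true;
                break;
            }
        }
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        out.write(is16Bit ? 0x01 : 0x00);       // flag byte is always present
        for (int i = 0; i < value.length(); i++) {
            char c = value.charAt(i);
            out.write(c & 0xFF);                // low byte (compressed case)
            if (is16Bit) {
                out.write((c >>> 8) & 0xFF);    // high byte, little-endian
            }
        }
        return out.toByteArray();
    }

    public static void main(String[] args) {
        // "Total" is all Latin-1, so it is stored compressed: 1 flag + 5 bytes.
        System.out.println(encode("Total").length);     // 6
        // "\u03A3um" contains U+03A3, so it is stored as UTF-16LE: 1 flag + 6 bytes.
        System.out.println(encode("\u03A3um").length);  // 7
    }
}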