Removed HDF API
Sonar fixes

git-svn-id: https://svn.apache.org/repos/asf/poi/trunk@1705779 13f79535-47bb-0310-9956-ffa450edef68

parent 5cd3436295
commit 6f7f16853e
@@ -19,6 +19,7 @@ package org.apache.poi.hslf.examples;

import java.awt.Rectangle;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.poi.hslf.usermodel.HSLFHyperlink;
import org.apache.poi.hslf.usermodel.HSLFSlide;
@@ -27,17 +28,14 @@ import org.apache.poi.hslf.usermodel.HSLFTextBox;

/**
* Demonstrates how to create hyperlinks in PowerPoint presentations
*
* @author Yegor Kozlov
*/
public final class CreateHyperlink {
public abstract class CreateHyperlink {

@SuppressWarnings("unused")
public static void main(String[] args) throws Exception {
public static void main(String[] args) throws IOException {
HSLFSlideShow ppt = new HSLFSlideShow();

HSLFSlide slideA = ppt.createSlide();
HSLFSlide slideB = ppt.createSlide();
ppt.createSlide();
HSLFSlide slideC = ppt.createSlide();

// link to a URL
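Note on the hunk above: the Sonar cleanup drops locals that are never read (slideB), narrows throws Exception to the IOException that is actually thrown, and makes the example class abstract so it is not instantiated. A minimal sketch of that style, assuming a hypothetical output file name and using getSlideNumber() only to give the remaining local a use:

    import java.io.FileOutputStream;
    import java.io.IOException;

    import org.apache.poi.hslf.usermodel.HSLFSlide;
    import org.apache.poi.hslf.usermodel.HSLFSlideShow;

    public abstract class HyperlinkStyleSketch {
        public static void main(String[] args) throws IOException {
            HSLFSlideShow ppt = new HSLFSlideShow();

            HSLFSlide slideA = ppt.createSlide(); // kept: referenced below
            ppt.createSlide();                    // middle slide needs no local

            System.out.println("first slide index: " + slideA.getSlideNumber());

            FileOutputStream out = new FileOutputStream("hyperlink.ppt"); // hypothetical name
            ppt.write(out);
            out.close();
        }
    }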
@@ -17,18 +17,16 @@
package org.apache.poi.hslf.examples;

import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.poi.hslf.model.HeadersFooters;
import org.apache.poi.hslf.usermodel.HSLFSlide;
import org.apache.poi.hslf.usermodel.HSLFSlideShow;

/**
* Demonstrates how to set headers / footers
*
* @author Yegor Kozlov
*/
public class HeadersFootersDemo {
public static void main(String[] args) throws Exception {
public abstract class HeadersFootersDemo {
public static void main(String[] args) throws IOException {
HSLFSlideShow ppt = new HSLFSlideShow();

HeadersFooters slideHeaders = ppt.getSlideHeadersFooters();
@@ -40,7 +38,7 @@ public class HeadersFootersDemo {
notesHeaders.setFootersText("My notes footers");
notesHeaders.setHeaderText("My notes header");

HSLFSlide slide = ppt.createSlide();
ppt.createSlide();

FileOutputStream out = new FileOutputStream("headers_footers.ppt");
ppt.write(out);
@@ -158,7 +158,7 @@ public class BigExample {

// demonstrate adding/naming and deleting a sheet
// create a sheet, set its title then delete it
s = wb.createSheet();
wb.createSheet();
wb.setSheetName(1, "DeletedSheet");
wb.removeSheetAt(1);
//end deleted sheet
@@ -167,5 +167,6 @@ public class BigExample {
// close our file (don't blow out our file handles
wb.write(out);
out.close();
wb.close();
}
}
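The BigExample hunk adds wb.close() after the stream is closed. A small sketch (illustration only, hypothetical file name) of the same write-then-close sequence with try/finally, so both handles are released even if the write fails:

    import java.io.FileOutputStream;
    import java.io.IOException;

    import org.apache.poi.hssf.usermodel.HSSFWorkbook;

    public final class WorkbookCloseSketch {
        public static void main(String[] args) throws IOException {
            HSSFWorkbook wb = new HSSFWorkbook();
            try {
                wb.createSheet("sheet1");
                FileOutputStream out = new FileOutputStream("workbook.xls"); // hypothetical name
                try {
                    wb.write(out);
                } finally {
                    out.close();
                }
            } finally {
                wb.close();
            }
        }
    }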
@@ -38,9 +38,6 @@ import org.apache.poi.ss.util.CellRangeAddress;
* THIS IS NOT THE MAIN HSSF FILE!! This is a utility for testing functionality.
* It does contain sample API usage that may be educational to regular API
* users.
*
* @see #main
* @author Andrew Oliver (acoliver at apache dot org)
*/
public final class HSSFReadWrite {

@@ -48,7 +45,12 @@ public final class HSSFReadWrite {
* creates an {@link HSSFWorkbook} the specified OS filename.
*/
private static HSSFWorkbook readFile(String filename) throws IOException {
return new HSSFWorkbook(new FileInputStream(filename));
FileInputStream fis = new FileInputStream(filename);
try {
return new HSSFWorkbook(fis);
} finally {
fis.close();
}
}

/**
@@ -115,7 +117,7 @@ public final class HSSFReadWrite {

// end draw thick black border
// create a sheet, set its title then delete it
s = wb.createSheet();
wb.createSheet();
wb.setSheetName(1, "DeletedSheet");
wb.removeSheetAt(1);

@@ -123,6 +125,8 @@ public final class HSSFReadWrite {
FileOutputStream out = new FileOutputStream(outputFilename);
wb.write(out);
out.close();

wb.close();
}

/**
@@ -198,6 +202,7 @@ public final class HSSFReadWrite {
}
}
}
wb.close();
} else if (args.length == 2) {
if (args[1].toLowerCase(Locale.ROOT).equals("write")) {
System.out.println("Write mode");
@@ -213,6 +218,7 @@ public final class HSSFReadWrite {

wb.write(stream);
stream.close();
wb.close();
}
} else if (args.length == 3 && args[2].toLowerCase(Locale.ROOT).equals("modify1")) {
// delete row 0-24, row 74 - 99 && change cell 3 on row 39 to string "MODIFIED CELL!!"
@@ -237,6 +243,7 @@ public final class HSSFReadWrite {

wb.write(stream);
stream.close();
wb.close();
}
} catch (Exception e) {
e.printStackTrace();
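The readFile() hunk above wraps the FileInputStream in try/finally so the handle is released even when the HSSFWorkbook constructor throws. On Java 7+ the same guarantee can be written with try-with-resources; this is an equivalent formulation, not the committed code:

    import java.io.FileInputStream;
    import java.io.IOException;

    import org.apache.poi.hssf.usermodel.HSSFWorkbook;

    final class ReadFileSketch {
        // The stream is closed on every exit path, exactly as in the try/finally version.
        static HSSFWorkbook readFile(String filename) throws IOException {
            try (FileInputStream fis = new FileInputStream(filename)) {
                return new HSSFWorkbook(fis);
            }
        }
    }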
@@ -17,27 +17,27 @@

package org.apache.poi.hssf.usermodel.examples;

import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.ss.util.WorkbookUtil;

import java.io.IOException;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.util.WorkbookUtil;

/**
* Creates a new workbook with a sheet that's been explicitly defined.
*
* @author Glen Stampoultzis (glens at apache.org)
*/
public class NewSheet {
public abstract class NewSheet {
public static void main(String[] args) throws IOException {
HSSFWorkbook wb = new HSSFWorkbook();
HSSFSheet sheet1 = wb.createSheet("new sheet");
HSSFSheet sheet2 = wb.createSheet(); // create with default name
wb.createSheet("new sheet");
// create with default name
wb.createSheet();
final String name = "second sheet";
wb.setSheetName(1, WorkbookUtil.createSafeSheetName(name)); // setting sheet name later
// setting sheet name later
wb.setSheetName(1, WorkbookUtil.createSafeSheetName(name));
FileOutputStream fileOut = new FileOutputStream("workbook.xls");
wb.write(fileOut);
fileOut.close();
wb.close();
}
}
@@ -21,23 +21,24 @@ package org.apache.poi.xslf.usermodel;

import java.awt.Dimension;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.io.PrintStream;

import org.apache.poi.openxml4j.exceptions.OpenXML4JException;
import org.apache.poi.openxml4j.opc.PackagePart;

/**
* Demonstrates how you can extract data from a .pptx file
*
* @author Yegor Kozlov
*/
public final class DataExtraction {

@SuppressWarnings("unused")
public static void main(String args[]) throws Exception {
public static void main(String args[]) throws IOException, OpenXML4JException {

PrintStream out = System.out;

if (args.length == 0) {
System.out.println("Input file is required");
out.println("Input file is required");
return;
}

@@ -46,10 +47,11 @@ public final class DataExtraction {
is.close();

// Get the document's embedded files.
List<PackagePart> embeds = ppt.getAllEmbedds();
for (PackagePart p : embeds) {
for (PackagePart p : ppt.getAllEmbedds()) {
String type = p.getContentType();
String name = p.getPartName().getName(); //typically file name
// typically file name
String name = p.getPartName().getName();
out.println("Embedded file ("+type+"): "+name);

InputStream pIs = p.getInputStream();
// make sense of the part data
@@ -58,33 +60,31 @@ public final class DataExtraction {
}

// Get the document's embedded files.
List<XSLFPictureData> images = ppt.getPictureData();
for (XSLFPictureData data : images) {
PackagePart p = data.getPackagePart();

String type = p.getContentType();
for (XSLFPictureData data : ppt.getPictureData()) {
String type = data.getContentType();
String name = data.getFileName();
out.println("Picture ("+type+"): "+name);

InputStream pIs = p.getInputStream();
InputStream pIs = data.getInputStream();
// make sense of the image data
pIs.close();

}

Dimension pageSize = ppt.getPageSize(); // size of the canvas in points
// size of the canvas in points
Dimension pageSize = ppt.getPageSize();
out.println("Pagesize: "+pageSize);

for(XSLFSlide slide : ppt.getSlides()) {
for(XSLFShape shape : slide){
if(shape instanceof XSLFTextShape) {
XSLFTextShape txShape = (XSLFTextShape)shape;
System.out.println(txShape.getText());
out.println(txShape.getText());
} else if (shape instanceof XSLFPictureShape){
XSLFPictureShape pShape = (XSLFPictureShape)shape;
XSLFPictureData pData = pShape.getPictureData();
System.out.println(pData.getFileName());
out.println(pData.getFileName());
} else {
System.out.println("Process me: " + shape.getClass());
out.println("Process me: " + shape.getClass());
}
}
}
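The picture loop now reads the content type, file name and stream straight from XSLFPictureData (the hunk itself uses getContentType(), getFileName() and getInputStream() on that class). A self-contained sketch of that loop with the stream closed in a finally block, assuming the surrounding XMLSlideShow variable:

    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.poi.xslf.usermodel.XMLSlideShow;
    import org.apache.poi.xslf.usermodel.XSLFPictureData;

    final class PictureLoopSketch {
        static void listPictures(XMLSlideShow ppt) throws IOException {
            for (XSLFPictureData data : ppt.getPictureData()) {
                InputStream pIs = data.getInputStream();
                try {
                    System.out.println("Picture (" + data.getContentType() + "): " + data.getFileName());
                    // ... make sense of the image data here ...
                } finally {
                    pIs.close();
                }
            }
        }
    }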
@@ -170,32 +170,30 @@ public class XLSX2CSV {
this.formatString = null;
String cellType = attributes.getValue("t");
String cellStyleStr = attributes.getValue("s");
if ("b".equals(cellType))
if ("b".equals(cellType)) {
nextDataType = xssfDataType.BOOL;
else if ("e".equals(cellType))
} else if ("e".equals(cellType)) {
nextDataType = xssfDataType.ERROR;
else if ("inlineStr".equals(cellType))
} else if ("inlineStr".equals(cellType)) {
nextDataType = xssfDataType.INLINESTR;
else if ("s".equals(cellType))
} else if ("s".equals(cellType)) {
nextDataType = xssfDataType.SSTINDEX;
else if ("str".equals(cellType))
} else if ("str".equals(cellType)) {
nextDataType = xssfDataType.FORMULA;
else if (cellStyleStr != null) {
} else if (cellStyleStr != null) {
// It's a number, but almost certainly one
// with a special style or format
XSSFCellStyle style = null;
if (cellStyleStr != null) {
int styleIndex = Integer.parseInt(cellStyleStr);
style = stylesTable.getStyleAt(styleIndex);
}
int styleIndex = Integer.parseInt(cellStyleStr);
XSSFCellStyle style = stylesTable.getStyleAt(styleIndex);
if (style == null && stylesTable.getNumCellStyles() > 0) {
style = stylesTable.getStyleAt(0);
}
if (style != null) {
this.formatIndex = style.getDataFormat();
this.formatString = style.getDataFormatString();
if (this.formatString == null)
if (this.formatString == null) {
this.formatString = BuiltinFormats.getBuiltinFormat(this.formatIndex);
}
}
}
}
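The hunk above only adds braces to the existing if/else chain and drops the redundant inner null check on cellStyleStr. For comparison, the same cell-type dispatch could also be written as a string switch (Java 7+); this is an alternative sketch, not what the commit does:

    final class CellTypeSketch {
        enum CellDataType { BOOL, ERROR, INLINESTR, SSTINDEX, FORMULA, NUMBER }

        // Maps the value of the cell's "t" attribute to the data type used by the example.
        static CellDataType dataTypeFor(String cellType) {
            if (cellType == null) {
                return CellDataType.NUMBER; // no "t" attribute: plain numeric cell
            }
            switch (cellType) {
                case "b":         return CellDataType.BOOL;
                case "e":         return CellDataType.ERROR;
                case "inlineStr": return CellDataType.INLINESTR;
                case "s":         return CellDataType.SSTINDEX;
                case "str":       return CellDataType.FORMULA;
                default:          return CellDataType.NUMBER;
            }
        }
    }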
@@ -18,6 +18,7 @@

package org.apache.poi.xssf.usermodel.examples;

import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellStyle;
@@ -33,7 +34,7 @@ import org.apache.poi.xssf.usermodel.XSSFWorkbook;
public class CreateUserDefinedDataFormats {

public static void main(String[]args) throws Exception {
public static void main(String[]args) throws IOException {
Workbook wb = new XSSFWorkbook(); //or new HSSFWorkbook();
Sheet sheet = wb.createSheet("format sheet");
CellStyle style;
@@ -43,14 +44,14 @@ public class CreateUserDefinedDataFormats {
short rowNum = 0;
short colNum = 0;

row = sheet.createRow(rowNum++);
row = sheet.createRow(rowNum);
cell = row.createCell(colNum);
cell.setCellValue(11111.25);
style = wb.createCellStyle();
style.setDataFormat(format.getFormat("0.0"));
cell.setCellStyle(style);

row = sheet.createRow(rowNum++);
row = sheet.createRow(++rowNum);
cell = row.createCell(colNum);
cell.setCellValue(11111.25);
style = wb.createCellStyle();
@@ -60,6 +61,8 @@ public class CreateUserDefinedDataFormats {
FileOutputStream fileOut = new FileOutputStream("ooxml_dataFormat.xlsx");
wb.write(fileOut);
fileOut.close();

wb.close();
}

}
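The increment rewrite above is behaviour-preserving for the two rows the example creates: the first call no longer post-increments (the incremented value was never used before the second call), and the second call pre-increments instead, so the rows still land at indices 0 and 1. A two-line illustration:

    public final class IncrementSketch {
        public static void main(String[] args) {
            short rowNum = 0;
            System.out.println("first row index:  " + rowNum);   // 0 (was rowNum++ before the patch)
            System.out.println("second row index: " + ++rowNum); // 1 (pre-increment)
        }
    }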
@@ -17,18 +17,19 @@

package org.apache.poi.xssf.usermodel.examples;

import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;

public class SelectedSheet {
public abstract class SelectedSheet {

public static void main(String[]args) throws Exception {
public static void main(String[]args) throws IOException {
Workbook wb = new XSSFWorkbook(); //or new HSSFWorkbook();

Sheet sheet = wb.createSheet("row sheet");
Sheet sheet2 = wb.createSheet("another sheet");
wb.createSheet("row sheet");
wb.createSheet("another sheet");
Sheet sheet3 = wb.createSheet(" sheet 3 ");
sheet3.setSelected(true);
wb.setActiveSheet(2);
@@ -38,6 +39,8 @@ public class SelectedSheet {
FileOutputStream fileOut = new FileOutputStream("selectedSheet.xlsx");
wb.write(fileOut);
fileOut.close();

wb.close();
}

}
@@ -33,7 +33,8 @@ import org.junit.Test;
public class HDGFFileHandler extends POIFSFileHandler {
@Override
public void handleFile(InputStream stream) throws Exception {
HDGFDiagram diagram = new HDGFDiagram(new POIFSFileSystem(stream));
POIFSFileSystem poifs = new POIFSFileSystem(stream);
HDGFDiagram diagram = new HDGFDiagram(poifs);
Stream[] topLevelStreams = diagram.getTopLevelStreams();
assertNotNull(topLevelStreams);
for(Stream str : topLevelStreams) {
@@ -44,6 +45,8 @@ public class HDGFFileHandler extends POIFSFileHandler {
assertNotNull(trailerStream);
assertTrue(trailerStream.getPointer().getLength() >= 0);

poifs.close();

// writing is not yet implemented... handlePOIDocument(diagram);
}
@@ -18,21 +18,14 @@ package org.apache.poi.stress;

import static org.junit.Assert.assertNotNull;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringWriter;

import org.apache.poi.hdf.extractor.WordDocument;
import org.apache.poi.hwpf.HWPFDocument;
import org.apache.poi.hwpf.extractor.WordExtractor;
import org.junit.Test;

@SuppressWarnings("deprecation")
public class HWPFFileHandler extends POIFSFileHandler {
@Override
public void handleFile(InputStream stream) throws Exception {
@@ -42,25 +35,6 @@ public class HWPFFileHandler extends POIFSFileHandler {
assertNotNull(doc.getEndnotes());

handlePOIDocument(doc);

// fails for many documents, but is deprecated anyway...
// handleWordDocument(doc);
}

protected void handleWordDocument(HWPFDocument doc) throws IOException {
ByteArrayOutputStream outStream = new ByteArrayOutputStream();
doc.write(outStream);

WordDocument wordDoc = new WordDocument(new ByteArrayInputStream(outStream.toByteArray()));

StringWriter docTextWriter = new StringWriter();
PrintWriter out = new PrintWriter(docTextWriter);
try {
wordDoc.writeAllText(out);
} finally {
out.close();
}
docTextWriter.close();
}

// a test-case to test this locally without executing the full TestAllFiles
@@ -31,6 +31,7 @@ public class POIFSFileHandler extends AbstractFileHandler {
public void handleFile(InputStream stream) throws Exception {
POIFSFileSystem fs = new POIFSFileSystem(stream);
handlePOIFSFileSystem(fs);
fs.close();
}

private void handlePOIFSFileSystem(POIFSFileSystem fs) {
@@ -45,5 +46,6 @@ public class POIFSFileHandler extends AbstractFileHandler {
ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
POIFSFileSystem fs = new POIFSFileSystem(in);
handlePOIFSFileSystem(fs);
fs.close();
}
}
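The handler now closes the POIFSFileSystem it opens. If the handling step itself can throw, putting the close in a finally block keeps the handle from leaking; a sketch of that variant (the committed code closes after a successful run):

    import java.io.InputStream;

    import org.apache.poi.poifs.filesystem.POIFSFileSystem;

    final class PoifsCloseSketch {
        static void handle(InputStream stream) throws Exception {
            POIFSFileSystem fs = new POIFSFileSystem(stream);
            try {
                // ... inspect the file system here ...
            } finally {
                fs.close(); // released even when inspection fails
            }
        }
    }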
@@ -23,7 +23,7 @@ class IndirectPropertyName
{
private CodePageString _value;

IndirectPropertyName( byte[] data, int offset )
IndirectPropertyName( byte[] data, int offset ) //NOSONAR
{
_value = new CodePageString( data, offset );
}
@@ -296,15 +296,17 @@ public final class SSTRecord extends ContinuableRecord {
* @return The new SST record.
*/
public ExtSSTRecord createExtSSTRecord(int sstOffset) {
if (bucketAbsoluteOffsets == null || bucketAbsoluteOffsets == null)
if (bucketAbsoluteOffsets == null || bucketRelativeOffsets == null) {
throw new IllegalStateException("SST record has not yet been serialized.");
}

ExtSSTRecord extSST = new ExtSSTRecord();
extSST.setNumStringsPerBucket((short)8);
int[] absoluteOffsets = bucketAbsoluteOffsets.clone();
int[] relativeOffsets = bucketRelativeOffsets.clone();
for ( int i = 0; i < absoluteOffsets.length; i++ )
for ( int i = 0; i < absoluteOffsets.length; i++ ) {
absoluteOffsets[i] += sstOffset;
}
extSST.setBucketOffsets(absoluteOffsets, relativeOffsets);
return extSST;
}
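createExtSSTRecord() clones both offset arrays before adding sstOffset, so the record's cached offsets stay untouched. The underlying idiom, reduced to one hypothetical helper:

    final class OffsetCopySketch {
        // Shift a copy, never the cached array itself (hypothetical helper).
        static int[] shiftedCopy(int[] cached, int delta) {
            int[] copy = cached.clone();
            for (int i = 0; i < copy.length; i++) {
                copy[i] += delta;
            }
            return copy;
        }
    }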
@@ -244,10 +244,12 @@ public abstract class OPCPackage implements RelationshipSource, Closeable {
*/
public static OPCPackage open(File file, PackageAccess access)
throws InvalidFormatException {
if (file == null)
throw new IllegalArgumentException("'file' must be given");
if (file == null || (file.exists() && file.isDirectory()))
throw new IllegalArgumentException("file must not be a directory");
if (file == null) {
throw new IllegalArgumentException("'file' must be given");
}
if (file.exists() && file.isDirectory()) {
throw new IllegalArgumentException("file must not be a directory");
}

OPCPackage pack = new ZipPackage(file, access);
if (pack.partList == null && access != PackageAccess.WRITE) {
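The combined null/directory condition is split into two checks, each with the message that actually applies. The same validation could also lean on java.util.Objects; this is an illustration, not the committed code:

    import java.io.File;
    import java.util.Objects;

    final class OpenArgsSketch {
        static void validate(File file) {
            Objects.requireNonNull(file, "'file' must be given");
            if (file.exists() && file.isDirectory()) {
                throw new IllegalArgumentException("file must not be a directory");
            }
        }
    }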
@@ -54,7 +54,7 @@ public final class XSLFPictureData extends POIXMLDocumentPart implements Picture
private Long checksum = null;

// original image dimensions (for formats supported by BufferedImage)
private Dimension _origSize = null;
private Dimension origSize = null;
private int index = -1;

/**
@@ -107,8 +107,9 @@ public final class XSLFPictureData extends POIXMLDocumentPart implements Picture
*/
public String getFileName() {
String name = getPackagePart().getPartName().getName();
if (name == null)
if (name == null) {
return null;
}
return name.substring(name.lastIndexOf('/') + 1);
}

@@ -132,7 +133,7 @@ public final class XSLFPictureData extends POIXMLDocumentPart implements Picture
@Override
public Dimension getImageDimension() {
cacheProperties();
return _origSize;
return origSize;
}

@Override
@@ -148,21 +149,21 @@ public final class XSLFPictureData extends POIXMLDocumentPart implements Picture
* Determine and cache image properties
*/
protected void cacheProperties() {
if (_origSize == null || checksum == null) {
if (origSize == null || checksum == null) {
byte data[] = getData();
checksum = IOUtils.calculateChecksum(data);

switch (getType()) {
case EMF:
_origSize = new EMF.NativeHeader(data, 0).getSize();
origSize = new EMF.NativeHeader(data, 0).getSize();
break;
case WMF:
// wmf files in pptx usually have their placeable header
// stripped away, so this returns only the dummy size
_origSize = new WMF.NativeHeader(data, 0).getSize();
origSize = new WMF.NativeHeader(data, 0).getSize();
break;
case PICT:
_origSize = new PICT.NativeHeader(data, 0).getSize();
origSize = new PICT.NativeHeader(data, 0).getSize();
break;
default:
BufferedImage img = null;
@@ -172,7 +173,7 @@ public final class XSLFPictureData extends POIXMLDocumentPart implements Picture
logger.log(POILogger.WARN, "Can't determine image dimensions", e);
}
// set dummy size, in case of dummy dimension can't be set
_origSize = (img == null)
origSize = (img == null)
? new Dimension(200,200)
: new Dimension(
(int)Units.pixelToPoints(img.getWidth()),
@@ -204,7 +205,7 @@ public final class XSLFPictureData extends POIXMLDocumentPart implements Picture
// recalculate now since we already have the data bytes available anyhow
checksum = IOUtils.calculateChecksum(data);

_origSize = null; // need to recalculate image size
origSize = null; // need to recalculate image size
}

@Override
@@ -112,8 +112,9 @@ public class XWPFPictureData extends POIXMLDocumentPart {
*/
public String getFileName() {
String name = getPackagePart().getPartName().getName();
if (name == null)
if (name == null) {
return null;
}
return name.substring(name.lastIndexOf('/') + 1);
}
@@ -1,439 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.hdf.event;

import org.apache.poi.hdf.model.util.BTreeSet;
import org.apache.poi.hdf.model.util.NumberFormatter;
import org.apache.poi.hdf.model.hdftypes.*;
import org.apache.poi.util.LittleEndian;

import java.util.ArrayList;
import java.util.List;

@Deprecated
public final class EventBridge implements HDFLowLevelParsingListener
{

private static int HEADER_EVEN_INDEX = 0;
private static int HEADER_ODD_INDEX = 1;
private static int FOOTER_EVEN_INDEX = 2;
private static int FOOTER_ODD_INDEX = 3;
private static int HEADER_FIRST_INDEX = 4;
private static int FOOTER_FIRST_INDEX = 5;

/** This class translates low level events into high level events for this
* listener */
HDFParsingListener _listener;
/** stylesheet for this document */
StyleSheet _stsh;
/** name says it all */
DocumentProperties _dop;
/** StyleDescription for the current paragraph. */
StyleDescription _currentStd;
/** List info for this doc */
ListTables _listTables;

/** "WordDocument" from the POIFS */
byte[] _mainDocument;
/** Table0 or Table1 from POIFS */
byte[] _tableStream;

/** text offset in main stream */
int _fcMin;
int _ccpText;
int _ccpFtn;
int _hdrSize;
int _hdrOffset;

/** text pieces */
BTreeSet _text = new BTreeSet();

private boolean _beginHeaders;
BTreeSet _hdrSections = new BTreeSet();
BTreeSet _hdrParagraphs = new BTreeSet();
BTreeSet _hdrCharacterRuns = new BTreeSet();

int _sectionCounter = 1;
List<HeaderFooter[]> _hdrs = new ArrayList<HeaderFooter[]>();

private boolean _holdParagraph = false;
private int _endHoldIndex = -1;
private List<PropertyNode> _onHold;

public EventBridge(HDFParsingListener listener)
{
_listener = listener;
}
public void mainDocument(byte[] mainDocument) {
if (mainDocument == null) {
throw new IllegalArgumentException("mainDocument is null.");
}
_mainDocument = mainDocument.clone();
}
public void tableStream(byte[] tableStream) {
if (tableStream == null) {
throw new IllegalArgumentException("tableStream is null.");
}
_tableStream = tableStream.clone();
}
public void miscellaneous(int fcMin, int ccpText, int ccpFtn, int fcPlcfhdd, int lcbPlcfhdd)
{
_fcMin = fcMin;
_ccpText = ccpText;
_ccpFtn = ccpFtn;
_hdrOffset = fcPlcfhdd;
_hdrSize = lcbPlcfhdd;
}
public void document(DocumentProperties dop)
{
_dop = dop;
}
public void bodySection(SepxNode sepx)
{
SectionProperties sep = (SectionProperties)StyleSheet.uncompressProperty(sepx.getSepx(), new SectionProperties(), _stsh);
HeaderFooter[] hdrArray = findSectionHdrFtrs(_sectionCounter);
_hdrs.add(hdrArray);
_listener.section(sep, sepx.getStart() - _fcMin, sepx.getEnd() - _fcMin);
_sectionCounter++;
}

public void hdrSection(SepxNode sepx)
{
_beginHeaders = true;
_hdrSections.add(sepx);
}
public void endSections()
{
for (int x = 1; x < _sectionCounter; x++)
{
HeaderFooter[] hdrArray = _hdrs.get(x-1);
HeaderFooter hf = null;

if (!hdrArray[HeaderFooter.HEADER_EVEN - 1].isEmpty())
{
hf = hdrArray[HeaderFooter.HEADER_EVEN - 1];
_listener.header(x - 1, HeaderFooter.HEADER_EVEN);
flushHeaderProps(hf.getStart(), hf.getEnd());
}
if (!hdrArray[HeaderFooter.HEADER_ODD - 1].isEmpty())
{
hf = hdrArray[HeaderFooter.HEADER_ODD - 1];
_listener.header(x - 1, HeaderFooter.HEADER_ODD);
flushHeaderProps(hf.getStart(), hf.getEnd());
}
if (!hdrArray[HeaderFooter.FOOTER_EVEN - 1].isEmpty())
{
hf = hdrArray[HeaderFooter.FOOTER_EVEN - 1];
_listener.footer(x - 1, HeaderFooter.FOOTER_EVEN);
flushHeaderProps(hf.getStart(), hf.getEnd());
}
if (!hdrArray[HeaderFooter.FOOTER_ODD - 1].isEmpty())
{
hf = hdrArray[HeaderFooter.FOOTER_EVEN - 1];
_listener.footer(x - 1, HeaderFooter.FOOTER_EVEN);
flushHeaderProps(hf.getStart(), hf.getEnd());
}
if (!hdrArray[HeaderFooter.HEADER_FIRST - 1].isEmpty())
{
hf = hdrArray[HeaderFooter.HEADER_FIRST - 1];
_listener.header(x - 1, HeaderFooter.HEADER_FIRST);
flushHeaderProps(hf.getStart(), hf.getEnd());
}
if (!hdrArray[HeaderFooter.FOOTER_FIRST - 1].isEmpty())
{
hf = hdrArray[HeaderFooter.FOOTER_FIRST - 1];
_listener.footer(x - 1, HeaderFooter.FOOTER_FIRST);
flushHeaderProps(hf.getStart(), hf.getEnd());
}
}
}

public void paragraph(PapxNode papx)
{
if (_beginHeaders)
{
_hdrParagraphs.add(papx);
}
byte[] bytePapx = papx.getPapx();
int istd = LittleEndian.getShort(bytePapx, 0);
_currentStd = _stsh.getStyleDescription(istd);

ParagraphProperties pap = (ParagraphProperties)StyleSheet.uncompressProperty(bytePapx, _currentStd.getPAP(), _stsh);

if (pap.getFTtp() > 0)
{
TableProperties tap = (TableProperties)StyleSheet.uncompressProperty(bytePapx, new TableProperties(), _stsh);
_listener.tableRowEnd(tap, papx.getStart() - _fcMin, papx.getEnd() - _fcMin);
}
else if (pap.getIlfo() > 0)
{
_holdParagraph = true;
_endHoldIndex = papx.getEnd();
_onHold.add(papx);
}
else
{
_listener.paragraph(pap, papx.getStart() - _fcMin, papx.getEnd() - _fcMin);
}
}

public void characterRun(ChpxNode chpx)
{
if (_beginHeaders)
{
_hdrCharacterRuns.add(chpx);
}

int start = chpx.getStart();
int end = chpx.getEnd();
//check to see if we should hold this characterRun
if (_holdParagraph)
{
_onHold.add(chpx);
if (end >= _endHoldIndex)
{
_holdParagraph = false;
_endHoldIndex = -1;
flushHeldParagraph();
_onHold = new ArrayList<PropertyNode>();
}
}

byte[] byteChpx = chpx.getChpx();

CharacterProperties chp = (CharacterProperties)StyleSheet.uncompressProperty(byteChpx, _currentStd.getCHP(), _stsh);

List<PropertyNode> textList = BTreeSet.findProperties(start, end, _text.root);
String text = getTextFromNodes(textList, start, end);

_listener.characterRun(chp, text, start - _fcMin, end - _fcMin);
}
public void text(TextPiece t)
{
_text.add(t);
}
public void fonts(FontTable fontTbl)
{
}
public void lists(ListTables listTbl)
{
_listTables = listTbl;
}
public void styleSheet(StyleSheet stsh)
{
_stsh = stsh;
}
private void flushHeaderProps(int start, int end)
{
List<PropertyNode> list = BTreeSet.findProperties(start, end, _hdrSections.root);
int size = list.size();

for (int x = 0; x < size; x++)
{
SepxNode oldNode = (SepxNode)list.get(x);
int secStart = Math.max(oldNode.getStart(), start);
int secEnd = Math.min(oldNode.getEnd(), end);

//SepxNode node = new SepxNode(-1, secStart, secEnd, oldNode.getSepx());
//bodySection(node);

List<PropertyNode> parList = BTreeSet.findProperties(secStart, secEnd, _hdrParagraphs.root);
int parSize = parList.size();

for (int y = 0; y < parSize; y++)
{
PapxNode oldParNode = (PapxNode)parList.get(y);
int parStart = Math.max(oldParNode.getStart(), secStart);
int parEnd = Math.min(oldParNode.getEnd(), secEnd);

PapxNode parNode = new PapxNode(parStart, parEnd, oldParNode.getPapx());
paragraph(parNode);

List<PropertyNode> charList = BTreeSet.findProperties(parStart, parEnd, _hdrCharacterRuns.root);
int charSize = charList.size();

for (int z = 0; z < charSize; z++)
{
ChpxNode oldCharNode = (ChpxNode)charList.get(z);
int charStart = Math.max(oldCharNode.getStart(), parStart);
int charEnd = Math.min(oldCharNode.getEnd(), parEnd);

ChpxNode charNode = new ChpxNode(charStart, charEnd, oldCharNode.getChpx());
characterRun(charNode);
}
}

}

}
private String getTextFromNodes(List<PropertyNode> list, int start, int end)
{
int size = list.size();

StringBuffer sb = new StringBuffer();

for (int x = 0; x < size; x++)
{
TextPiece piece = (TextPiece)list.get(x);
int charStart = Math.max(start, piece.getStart());
int charEnd = Math.min(end, piece.getEnd());

if(piece.usesUnicode())
{
for (int y = charStart; y < charEnd; y += 2)
{
sb.append((char)LittleEndian.getShort(_mainDocument, y));
}
}
else
{
for (int y = charStart; y < charEnd; y++)
{
sb.append(_mainDocument[y]);
}
}
}
return sb.toString();
}

private void flushHeldParagraph()
{
PapxNode papx = (PapxNode)_onHold.get(0);
byte[] bytePapx = papx.getPapx();
int istd = LittleEndian.getShort(bytePapx, 0);
StyleDescription std = _stsh.getStyleDescription(istd);

ParagraphProperties pap = (ParagraphProperties)StyleSheet.uncompressProperty(bytePapx, _currentStd.getPAP(), _stsh);
LVL lvl = _listTables.getLevel(pap.getIlfo(), pap.getIlvl());
pap = (ParagraphProperties)StyleSheet.uncompressProperty(lvl._papx, pap, _stsh, false);

int size = _onHold.size() - 1;

CharacterProperties numChp = (CharacterProperties)StyleSheet.uncompressProperty(((ChpxNode)_onHold.get(size)).getChpx(), std.getCHP(), _stsh);

numChp = (CharacterProperties)StyleSheet.uncompressProperty(lvl._chpx, numChp, _stsh);
String bulletText = getBulletText(lvl, pap);

_listener.listEntry(bulletText, numChp, pap, papx.getStart() - _fcMin, papx.getEnd() - _fcMin);
for (int x = 1; x <= size; x++)
{
characterRun((ChpxNode)_onHold.get(x));
}

}

private String getBulletText(LVL lvl, ParagraphProperties pap)
{
StringBuffer bulletBuffer = new StringBuffer();
for(int x = 0; x < lvl._xst.length; x++)
{
if(lvl._xst[x] < 9)
{
LVL numLevel = _listTables.getLevel(pap.getIlfo(), lvl._xst[x]);
int num = numLevel._iStartAt;
if(lvl == numLevel)
{
numLevel._iStartAt++;
}
else if(num > 1)
{
num--;
}
bulletBuffer.append(NumberFormatter.getNumber(num, lvl._nfc));

}
else
{
bulletBuffer.append(lvl._xst[x]);
}

}

switch (lvl._ixchFollow)
{
case 0:
bulletBuffer.append('\u0009');
break;
case 1:
bulletBuffer.append(' ');
break;
}
return bulletBuffer.toString();
}

private HeaderFooter[] findSectionHdrFtrs(int index)
{
HeaderFooter[] hdrArray = new HeaderFooter[6];

for (int x = 1; x < 7; x++)
{
hdrArray[x-1] = createSectionHdrFtr(index, x);
}

return hdrArray;
}

private HeaderFooter createSectionHdrFtr(int index, int type)
{
if(_hdrSize < 50)
{
return new HeaderFooter(0,0,0);
}

int start = _fcMin + _ccpText + _ccpFtn;
int end = start;
int arrayIndex = 0;

switch(type)
{
case HeaderFooter.HEADER_EVEN:
arrayIndex = (HEADER_EVEN_INDEX + (index * 6));
break;
case HeaderFooter.FOOTER_EVEN:
arrayIndex = (FOOTER_EVEN_INDEX + (index * 6));
break;
case HeaderFooter.HEADER_ODD:
arrayIndex = (HEADER_ODD_INDEX + (index * 6));
break;
case HeaderFooter.FOOTER_ODD:
arrayIndex = (FOOTER_ODD_INDEX + (index * 6));
break;
case HeaderFooter.HEADER_FIRST:
arrayIndex = (HEADER_FIRST_INDEX + (index * 6));
break;
case HeaderFooter.FOOTER_FIRST:
arrayIndex = (FOOTER_FIRST_INDEX + (index * 6));
break;
}
start += LittleEndian.getInt(_tableStream, _hdrOffset + (arrayIndex * 4));
end += LittleEndian.getInt(_tableStream, _hdrOffset + (arrayIndex + 1) * 4);

HeaderFooter retValue = new HeaderFooter(type, start, end);

if((end - start) == 0 && index > 1)
{
retValue = createSectionHdrFtr(type, index - 1);
}
return retValue;
}
}
@@ -1,45 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.hdf.event;

import org.apache.poi.hdf.model.hdftypes.ChpxNode;
import org.apache.poi.hdf.model.hdftypes.PapxNode;
import org.apache.poi.hdf.model.hdftypes.SepxNode;
import org.apache.poi.hdf.model.hdftypes.TextPiece;
import org.apache.poi.hdf.model.hdftypes.DocumentProperties;
import org.apache.poi.hdf.model.hdftypes.FontTable;
import org.apache.poi.hdf.model.hdftypes.ListTables;
import org.apache.poi.hdf.model.hdftypes.StyleSheet;

@Deprecated
public interface HDFLowLevelParsingListener
{
public void mainDocument(byte[] mainDocument);
public void tableStream(byte[] tableStream);
public void document(DocumentProperties dop);
public void bodySection(SepxNode sepx);
public void paragraph(PapxNode papx);
public void characterRun(ChpxNode chpx);
public void hdrSection(SepxNode sepx);
public void endSections();
public void text(TextPiece t);
public void fonts(FontTable fontTbl);
public void lists(ListTables listTbl);
public void styleSheet(StyleSheet stsh);
public void miscellaneous(int fcMin, int ccpText, int ccpFtn, int fcPlcfhdd, int lcbPlcfhdd);
}
@@ -1,38 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.hdf.event;

import org.apache.poi.hdf.model.hdftypes.SectionProperties;
import org.apache.poi.hdf.model.hdftypes.CharacterProperties;
import org.apache.poi.hdf.model.hdftypes.ParagraphProperties;
import org.apache.poi.hdf.model.hdftypes.TableProperties;
import org.apache.poi.hdf.model.hdftypes.DocumentProperties;

@Deprecated
public interface HDFParsingListener
{
public void document(DocumentProperties dop);
public void section(SectionProperties sep, int start, int end);
public void paragraph(ParagraphProperties pap, int start, int end);
public void listEntry(String bulletText, CharacterProperties bulletProperties, ParagraphProperties pap, int start, int end);
public void paragraphInTableRow(ParagraphProperties pap, int start, int end);
public void characterRun(CharacterProperties chp, String text, int start, int end);
public void tableRowEnd(TableProperties tap, int start, int end);
public void header(int sectionIndex, int type);
public void footer(int sectionIndex, int type);
}
@@ -1,178 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.hdf.extractor;

/**
* Comment me
*
* @author Ryan Ackley
*/
@Deprecated
public final class CHP implements Cloneable
{
boolean _bold;
boolean _italic;
boolean _fRMarkDel;
boolean _fOutline;
boolean _fSmallCaps;
boolean _fCaps;
boolean _fVanish;
boolean _fRMark;
boolean _fSpec;
boolean _fStrike;
boolean _fObj;
boolean _fShadow;
boolean _fLowerCase;
boolean _fData;
boolean _fOle2;
boolean _fEmboss;
boolean _fImprint;
boolean _fDStrike;

short _ftcAscii;
short _ftcFE;
short _ftcOther;
short _ftc;
int _hps;//font size in half points
int _dxaSpace;//space following each character in the run expressed in twip units
byte _iss;//superscript/subscript indices 0 means no super/subscripting 1 means text in run is superscripted 2 means text in run is subscripted
byte _kul;//underline code see spec
byte _ico;//color of text see spec
short _hpsPos;//super/subscript position in half points; positive means text is raised; negative means text is lowered
short _lidDefault;//language for non-Far East text
short _lidFE;//language for Far East text
byte _idctHint;
int _wCharScale;
short _chse;

int _specialFC;//varies depending on whether this is a special char
short _ibstRMark;//index to author IDs stored in hsttbfRMark. used when text in run was newly typed when revision marking was enabled
short _ibstRMarkDel;//index to author IDs stored in hsttbfRMark. used when text in run was newly typed when revision marking was enabled
int[] _dttmRMark = new int[2];//Date/time at which this run of text was
int[] _dttmRMarkDel = new int[2];//entered/modified by the author. (Only
//recorded when revision marking is on.)Date/time at which this run of text was deleted by the author. (Only recorded when revision marking is on.)
int _istd;
int _baseIstd = -1;
int _fcPic;
short _ftcSym;// see spec
short _xchSym;//see spec
byte _ysr;//hyphenation rules
byte _chYsr;//used for hyphenation see spec
int _hpsKern;//kerning distance for characters in run recorded in half points
int _fcObj;
byte _icoHighlight;//highlight color
boolean _fChsDiff;
boolean _highlighted;//when true characters are highlighted with color specified by chp.icoHighlight
boolean _fPropMark;//when true, properties have been changed with revision marking on
short _ibstPropRMark;//index to author IDs stored in hsttbfRMark. used when properties have been changed when revision marking was enabled
int _dttmPropRMark;//Date/time at which properties of this were changed for this run of text by the author
byte _sfxtText;//text animation see spec
boolean _fDispFldRMark;//see spec
short _ibstDispFldRMark;//Index to author IDs stored in hsttbfRMark. used when ListNum field numbering has been changed when revision marking was enabled
int _dttmDispFldRMark;//The date for the ListNum field number change
byte[] _xstDispFldRMark = new byte[32];//The string value of the ListNum field when revision mark tracking began
short _shd;//shading
short[] _brc = new short[2];//border
short _paddingStart = 0;
short _paddingEnd = 0;

public CHP()
{
_istd = 10;
_hps = 20;
_lidDefault = 0x0400;
_lidFE = 0x0400;

}
public void copy(CHP toCopy)
{
_bold = toCopy._bold;
_italic = toCopy._italic;
_fRMarkDel = toCopy._fRMarkDel;
_fOutline = toCopy._fOutline;
_fSmallCaps = toCopy._fSmallCaps;
_fCaps = toCopy._fCaps;
_fVanish = toCopy._fVanish;
_fRMark = toCopy._fRMark;
_fSpec = toCopy._fSpec;
_fStrike = toCopy._fStrike;
_fObj = toCopy._fObj;
_fShadow = toCopy._fShadow;
_fLowerCase = toCopy._fLowerCase;
_fData = toCopy._fData;
_fOle2 = toCopy._fOle2;
_fEmboss = toCopy._fEmboss;
_fImprint = toCopy._fImprint;
_fDStrike = toCopy._fDStrike;

_ftcAscii = toCopy._ftcAscii;
_ftcFE = toCopy._ftcFE;
_ftcOther = toCopy._ftcOther;
_ftc = toCopy._ftc;
_hps = toCopy._hps;
_dxaSpace = toCopy._dxaSpace;
_iss = toCopy._iss;
_kul = toCopy._kul;
_ico = toCopy._ico;
_hpsPos = toCopy._hpsPos;
_lidDefault = toCopy._lidDefault;
_lidFE = toCopy._lidFE;
_idctHint = toCopy._idctHint;
_wCharScale = toCopy._wCharScale;
_chse = toCopy._chse;

_specialFC = toCopy._specialFC;
_ibstRMark = toCopy._ibstRMark;
_ibstRMarkDel = toCopy._ibstRMarkDel;
_dttmRMark = toCopy._dttmRMark;
_dttmRMarkDel = toCopy._dttmRMarkDel;

_istd = toCopy._istd;
_baseIstd = toCopy._baseIstd;
_fcPic = toCopy._fcPic;
_ftcSym = toCopy._ftcSym;
_xchSym = toCopy._xchSym;
_ysr = toCopy._ysr;
_chYsr = toCopy._chYsr;
_hpsKern = toCopy._hpsKern;
_fcObj = toCopy._fcObj;
_icoHighlight = toCopy._icoHighlight;
_fChsDiff = toCopy._fChsDiff;
_highlighted = toCopy._highlighted;
_fPropMark = toCopy._fPropMark;
_ibstPropRMark = toCopy._ibstPropRMark;
_dttmPropRMark = toCopy._dttmPropRMark;
_sfxtText = toCopy._sfxtText;
_fDispFldRMark = toCopy._fDispFldRMark;
_ibstDispFldRMark = toCopy._ibstDispFldRMark;
_dttmDispFldRMark = toCopy._dttmDispFldRMark;
_xstDispFldRMark = toCopy._xstDispFldRMark;
_shd = toCopy._shd;
_brc = toCopy._brc;

}

public Object clone() throws CloneNotSupportedException
{
CHP clone = (CHP)super.clone();
clone._brc = new short[2];
System.arraycopy(_brc, 0, clone._brc, 0, 2);
return clone;
}
}
@@ -1,63 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.hdf.extractor;

/**
* Comment me
*
* @author Ryan Ackley
*/
@Deprecated
public final class FontTable
{
String[] fontNames;

public FontTable(byte[] fontTable)
{
int size = Utils.convertBytesToShort(fontTable, 0);
fontNames = new String[size];

int currentIndex = 4;
for(int x = 0; x < size; x++)
{
byte ffnLength = fontTable[currentIndex];

int nameOffset = currentIndex + 40;
StringBuffer nameBuf = new StringBuffer();
char ch = Utils.getUnicodeCharacter(fontTable, nameOffset);
while(ch != '\0')
{
nameBuf.append(ch);
nameOffset += 2;
ch = Utils.getUnicodeCharacter(fontTable, nameOffset);
}
fontNames[x] = nameBuf.toString();
if(fontNames[x].startsWith("Times"))
{
fontNames[x] = "Times";
}

currentIndex += ffnLength + 1;
}

}
public String getFont(int index)
{
return fontNames[index];
}
}
@@ -1,57 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.hdf.extractor;

/**
* Comment me
*
* @author Ryan Ackley
*/
@Deprecated
public final class HeaderFooter
{
public static final int HEADER_EVEN = 1;
public static final int HEADER_ODD = 2;
public static final int FOOTER_EVEN = 3;
public static final int FOOTER_ODD = 4;
public static final int HEADER_FIRST = 5;
public static final int FOOTER_FIRST = 6;

private int _type;
private int _start;
private int _end;

public HeaderFooter(int type, int startFC, int endFC)
{
_type = type;
_start = startFC;
_end = endFC;
}
public int getStart()
{
return _start;
}
public int getEnd()
{
return _end;
}
public boolean isEmpty()
{
return _start - _end == 0;
}
}
@@ -1,244 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.hdf.extractor;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
* Comment me
*
* @author Ryan Ackley
*/
@Deprecated
public final class NewOleFile extends RandomAccessFile
{
private byte[] LAOLA_ID_ARRAY = new byte[]{(byte)0xd0, (byte)0xcf, (byte)0x11,
(byte)0xe0, (byte)0xa1, (byte)0xb1,
(byte)0x1a, (byte)0xe1};
private int _num_bbd_blocks;
private int _root_startblock;
private int _sbd_startblock;
private long _size;
private int[] _bbd_list;
protected int[] _big_block_depot;
protected int[] _small_block_depot;
Map<String,PropertySet> _propertySetsHT = new HashMap<String,PropertySet>();
List<PropertySet> _propertySetsV = new ArrayList<PropertySet>();

public NewOleFile(String fileName, String mode) throws FileNotFoundException
{
super(fileName, mode);
try
{
init();
}
catch(Exception e)
{
e.printStackTrace();
}
}

@SuppressWarnings("unused")
private void init() throws IOException
{

for(int x = 0; x < LAOLA_ID_ARRAY.length; x++)
{
if(LAOLA_ID_ARRAY[x] != readByte())
{
throw new IOException("Not an OLE file");
}
}
_size = length();
_num_bbd_blocks = readInt(0x2c);
_root_startblock = readInt(0x30);
_sbd_startblock = readInt(0x3c);
_bbd_list = new int[_num_bbd_blocks];
//populate bbd_list. If _num_bbd_blocks > 109 I have to do it
//differently
if(_num_bbd_blocks <= 109)
{
seek(0x4c);
for(int x = 0; x < _num_bbd_blocks; x++)
{
_bbd_list[x] = readIntLE();
}
}
else
{
populateBbdList();
}
//populate the big block depot
_big_block_depot = new int[_num_bbd_blocks * 128];
int counter = 0;
for(int x = 0; x < _num_bbd_blocks; x++)
{
int offset = (_bbd_list[x] + 1) * 512;
seek(offset);
for(int y = 0; y < 128; y++)
{
_big_block_depot[counter++] = readIntLE();
}
}
_small_block_depot = createSmallBlockDepot();
int[] rootChain = readChain(_big_block_depot, _root_startblock);
initializePropertySets(rootChain);

}

public static void main(String args[]) throws Exception {
NewOleFile nof = new NewOleFile(args[0], "r");
nof.close();
}

protected int[] readChain(int[] blockChain, int startBlock) {

int[] tempChain = new int[blockChain.length];
tempChain[0] = startBlock;
int x = 1;
for(;;x++)
{
int nextVal = blockChain[tempChain[x-1]];
if(nextVal != -2)
{
tempChain[x] = nextVal;
}
else
{
break;
}
}
int[] newChain = new int[x];
System.arraycopy(tempChain, 0, newChain, 0, x);

return newChain;
}
private void initializePropertySets(int[] rootChain) throws IOException
{
for(int x = 0; x < rootChain.length; x++)
{
int offset = (rootChain[x] + 1) * 512;
seek(offset);
for(int y = 0; y < 4; y++)
{
//read the block the makes up the property set
byte[] propArray = new byte[128];
read(propArray);

//parse the byte array for properties
int nameSize = Utils.convertBytesToShort(propArray[0x41], propArray[0x40])/2 - 1;
if(nameSize > 0)
{
StringBuffer nameBuffer = new StringBuffer(nameSize);
for(int z = 0; z < nameSize; z++)
{
nameBuffer.append((char)propArray[z*2]);
}
int type = propArray[0x42];
int previous_pps = Utils.convertBytesToInt(propArray[0x47], propArray[0x46], propArray[0x45], propArray[0x44]);
int next_pps = Utils.convertBytesToInt(propArray[0x4b], propArray[0x4a], propArray[0x49], propArray[0x48]);
int pps_dir = Utils.convertBytesToInt(propArray[0x4f], propArray[0x4e], propArray[0x4d], propArray[0x4c]);
int pps_sb = Utils.convertBytesToInt(propArray[0x77], propArray[0x76], propArray[0x75], propArray[0x74]);
int pps_size = Utils.convertBytesToInt(propArray[0x7b], propArray[0x7a], propArray[0x79], propArray[0x78]);

PropertySet propSet = new PropertySet(nameBuffer.toString(),
type, previous_pps, next_pps,
pps_dir, pps_sb, pps_size,
(x*4) + y);
_propertySetsHT.put(nameBuffer.toString(), propSet);
_propertySetsV.add(propSet);
}
}
}

}
private int[] createSmallBlockDepot() throws IOException
{

int[] sbd_list = readChain(_big_block_depot, _sbd_startblock);
int[] small_block_depot = new int[sbd_list.length * 128];

for(int x = 0; x < sbd_list.length && sbd_list[x] != -2; x++)
{
int offset = ((sbd_list[x] + 1) * 512);
seek(offset);
for(int y = 0; y < 128; y++)
{
small_block_depot[y] = readIntLE();
}
}
return small_block_depot;
}

private void populateBbdList() throws IOException
{
seek(0x4c);
for(int x = 0; x < 109; x++)
{
_bbd_list[x] = readIntLE();
}
int pos = 109;
int remainder = _num_bbd_blocks - 109;
seek(0x48);
int numLists = readIntLE();
seek(0x44);
int firstList = readIntLE();

firstList = (firstList + 1) * 512;

for(int y = 0; y < numLists; y++)
{
int size = Math.min(127, remainder);
for(int z = 0; z < size; z++)
{
seek(firstList + (z * 4));
_bbd_list[pos++] = readIntLE();
}
if(size == 127)
{
seek(firstList + (127 * 4));
firstList = readIntLE();
firstList = (firstList + 1) * 512;
remainder -= 127;
}
}

}
private int readInt(long offset) throws IOException
{
seek(offset);
return readIntLE();
}
private int readIntLE() throws IOException
{
byte[] intBytes = new byte[4];
read(intBytes);
return Utils.convertBytesToInt(intBytes[3], intBytes[2], intBytes[1], intBytes[0]);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
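// The header and block-chain parsing above duplicates what POI's POIFS component provides.
// A minimal sketch of listing the same directory entries with POIFS (a hedged illustration,
// not part of this diff; assumes the org.apache.poi.poifs.filesystem API that supersedes the
// deprecated HDF code):
//
//   import java.io.FileInputStream;
//   import org.apache.poi.poifs.filesystem.Entry;
//   import org.apache.poi.poifs.filesystem.POIFSFileSystem;
//
//   public final class ListOleEntries {
//     public static void main(String[] args) throws Exception {
//       FileInputStream in = new FileInputStream(args[0]);
//       try {
//         POIFSFileSystem fs = new POIFSFileSystem(in);
//         for (Entry entry : fs.getRoot()) {        // root directory, analogous to the root chain above
//           System.out.println(entry.getName());    // entry names, like the PropertySet names collected above
//         }
//       } finally {
//         in.close();
//       }
//     }
//   }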
@@ -1,131 +0,0 @@
package org.apache.poi.hdf.extractor;
|
||||
|
||||
/**
|
||||
* Paragraph properties (PAP).
|
||||
*
|
||||
* @author Ryan Ackley
|
||||
*/
|
||||
@Deprecated
|
||||
public final class PAP implements Cloneable
|
||||
{
|
||||
int _istd;//index to style descriptor.
|
||||
byte _jc;//justification code
|
||||
byte _fKeep;//keep entire paragraph on one page if possible
|
||||
byte _fKeepFollow;//keep paragraph on same page with next paragraph if possible
|
||||
byte _fPageBreakBefore;//start this paragraph on new page
|
||||
byte _positionByte;//multiple flags see spec;
|
||||
byte _brcp;//rectangle border codes for Macword 3.0
|
||||
byte _brcl;//border line styles for Macword 3.0
|
||||
byte _ilvl;//when non-zero, list level for this paragraph
|
||||
byte _fNoLnn;//no line numbering for this paragraph. (makes this an exception to the section property of line numbering)
|
||||
int _ilfo;//when non-zero, (1-based) index into the pllfo identifying the list to which the paragraph belongs
|
||||
byte _fSideBySide;//when 1, paragraph is a side by side paragraph
|
||||
byte _fNoAutoHyph;//when 0, text in paragraph may be auto hyphenated.
|
||||
byte _fWindowControl;//when 1, Word will prevent widowed lines in this paragraph from being placed at the beginning of a page
|
||||
int _dxaRight;//indent from right margin (signed).
|
||||
int _dxaLeft;//indent from left margin (signed)
|
||||
int _dxaLeft1;//first line indent; signed number relative to dxaLeft
|
||||
int[] _lspd = new int[2];//line spacing descriptor see spec
|
||||
int _dyaBefore;// vertical spacing before paragraph (unsigned)
|
||||
int _dyaAfter;//vertical spacing after paragraph (unsigned)
|
||||
byte[] _phe = new byte[12];//height of current paragraph
|
||||
byte _fCrLf;//undocumented
|
||||
byte _fUsePgsuSettings;//undocumented
|
||||
byte _fAdjustRight;//undocumented
|
||||
byte _fKinsoku;// when 1, apply kinsoku rules when performing line wrapping
|
||||
byte _fWordWrap;//when 1, perform word wrap
|
||||
byte _fOverflowPunct;//when 1, apply overflow punctuation rules when performing line wrapping
|
||||
byte _fTopLinePunct;//when 1, perform top line punctuation processing
|
||||
byte _fAutoSpaceDE;//when 1, auto space FE and alphabetic characters
|
||||
byte _fAutoSpaceDN;// when 1, auto space FE and numeric characters
|
||||
int _wAlignFont;//font alignment 0 Hanging 1 Centered 2 Roman 3 Variable 4 Auto
|
||||
short _fontAlign;//multiVal see Spec.
|
||||
byte _fInTable;//when 1, paragraph is contained in a table row
|
||||
byte _fTtp;//when 1, paragraph consists only of the row mark special character and marks the end of a table row
|
||||
byte _wr;//Wrap Code for absolute objects
|
||||
byte _fLocked;//when 1, paragraph may not be edited
|
||||
int _dxaAbs;//see spec
|
||||
int _dyaAbs;//see spec
|
||||
int _dxaWidth;//when not == 0, paragraph is constrained to be dxaWidth wide, independent of current margin or column settings
|
||||
short[] _brcTop = new short[2];     // specification for border above paragraph
short[] _brcLeft = new short[2];    // specification for border to the left of paragraph
short[] _brcBottom = new short[2];  // specification for border below paragraph
short[] _brcRight = new short[2];   // specification for border to the right of paragraph
short[] _brcBetween = new short[2]; // see spec
short[] _brcBar = new short[2];     // specification of border to place on outside of text when facing pages are to be displayed
short _brcTop1;     // specification for border above paragraph
short _brcLeft1;    // specification for border to the left of paragraph
short _brcBottom1;  // specification for border below paragraph
short _brcRight1;   // specification for border to the right of paragraph
short _brcBetween1; // see spec
short _brcBar1;     // specification of border to place on outside of text when facing pages are to be displayed
|
||||
int _dxaFromText;//horizontal distance to be maintained between an absolutely positioned paragraph and any non-absolute positioned text
|
||||
int _dyaFromText;//vertical distance to be maintained between an absolutely positioned paragraph and any non-absolute positioned text
|
||||
int _dyaHeight;//see spec
|
||||
int _shd;//shading
|
||||
int _dcs;//drop cap specifier
|
||||
byte[] _anld = new byte[84];//autonumber list descriptor (see ANLD definition)
|
||||
short _fPropRMark;//when 1, properties have been changed with revision marking on
|
||||
short _ibstPropRMark;//index to author IDs stored in hsttbfRMark. used when properties have been changed when revision marking was enabled
|
||||
byte[] _dttmPropRMark = new byte[4];//Date/time at which properties of this were changed for this run of text by the author. (Only recorded when revision marking is on.)
|
||||
byte[] _numrm = new byte[8];//paragraph numbering revision mark data (see NUMRM)
|
||||
short _itbdMac;//number of tabs stops defined for paragraph. Must be >= 0 and <= 64.
|
||||
|
||||
|
||||
|
||||
public PAP()
|
||||
{
|
||||
_fWindowControl = 1;
|
||||
//lspd[0] = 240;
|
||||
_lspd[1] = 1;
|
||||
_ilvl = 9;
|
||||
}
|
||||
public Object clone() throws CloneNotSupportedException
|
||||
{
|
||||
PAP clone = (PAP)super.clone();
|
||||
|
||||
clone._brcBar = new short[2];
|
||||
clone._brcBottom = new short[2];
|
||||
clone._brcLeft = new short[2];
|
||||
clone._brcBetween = new short[2];
|
||||
clone._brcRight = new short[2];
|
||||
clone._brcTop = new short[2];
|
||||
clone._lspd = new int[2];
|
||||
clone._phe = new byte[12];
|
||||
clone._anld = new byte[84];
|
||||
clone._dttmPropRMark = new byte[4];
|
||||
clone._numrm = new byte[8];
|
||||
|
||||
System.arraycopy(_brcBar, 0, clone._brcBar, 0, 2);
|
||||
System.arraycopy(_brcBottom, 0, clone._brcBottom, 0, 2);
|
||||
System.arraycopy(_brcLeft, 0, clone._brcLeft, 0, 2);
|
||||
System.arraycopy(_brcBetween, 0, clone._brcBetween, 0, 2);
|
||||
System.arraycopy(_brcRight, 0, clone._brcRight, 0, 2);
|
||||
System.arraycopy(_brcTop, 0, clone._brcTop, 0, 2);
|
||||
System.arraycopy(_lspd, 0, clone._lspd, 0, 2);
|
||||
System.arraycopy(_phe, 0, clone._phe, 0, 12);
|
||||
System.arraycopy(_anld, 0, clone._anld, 0, 84);
|
||||
System.arraycopy(_dttmPropRMark, 0, clone._dttmPropRMark, 0, 4);
|
||||
System.arraycopy(_numrm, 0, clone._numrm, 0, 8);
|
||||
|
||||
return clone;
|
||||
}
|
||||
|
||||
}
|
@@ -1,57 +0,0 @@
package org.apache.poi.hdf.extractor;

/**
 * A directory entry (property) in an OLE 2 compound document.
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class PropertySet
{
  private String _name;
  private int _type;
  private int _previous;
  private int _next;
  private int _dir;
  private int _sb;
  private int _size;
  private int _num;

  public PropertySet(String name, int type, int previous, int next, int dir,
                     int sb, int size, int num)
  {
    _name = name;
    _type = type;
    _previous = previous;
    _next = next;
    _dir = dir;
    _sb = sb;
    _size = size;
    _num = num;
  }
  public int getSize()
  {
    return _size;
  }
  public int getStartBlock()
  {
    return _sb;
  }
}
@@ -1,100 +0,0 @@
package org.apache.poi.hdf.extractor;
|
||||
|
||||
/**
|
||||
* Section properties (SEP).
|
||||
*
|
||||
* @author Ryan Ackley
|
||||
*/
|
||||
@Deprecated
|
||||
public final class SEP
|
||||
{
|
||||
int _index;
|
||||
byte _bkc;
|
||||
boolean _fTitlePage;
|
||||
boolean _fAutoPgn;
|
||||
byte _nfcPgn;
|
||||
boolean _fUnlocked;
|
||||
byte _cnsPgn;
|
||||
boolean _fPgnRestart;
|
||||
boolean _fEndNote;
|
||||
byte _lnc;
|
||||
byte _grpfIhdt;
|
||||
short _nLnnMod;
|
||||
int _dxaLnn;
|
||||
short _dxaPgn;
|
||||
short _dyaPgn;
|
||||
boolean _fLBetween;
|
||||
byte _vjc;
|
||||
short _dmBinFirst;
|
||||
short _dmBinOther;
|
||||
short _dmPaperReq;
|
||||
short[] _brcTop = new short[2];
|
||||
short[] _brcLeft = new short[2];
|
||||
short[] _brcBottom = new short[2];
|
||||
short[] _brcRight = new short[2];
|
||||
boolean _fPropMark;
|
||||
int _dxtCharSpace;
|
||||
int _dyaLinePitch;
|
||||
short _clm;
|
||||
byte _dmOrientPage;
|
||||
byte _iHeadingPgn;
|
||||
short _pgnStart;
|
||||
short _lnnMin;
|
||||
short _wTextFlow;
|
||||
short _pgbProp;
|
||||
int _xaPage;
|
||||
int _yaPage;
|
||||
int _dxaLeft;
|
||||
int _dxaRight;
|
||||
int _dyaTop;
|
||||
int _dyaBottom;
|
||||
int _dzaGutter;
|
||||
int _dyaHdrTop;
|
||||
int _dyaHdrBottom;
|
||||
short _ccolM1;
|
||||
boolean _fEvenlySpaced;
|
||||
int _dxaColumns;
|
||||
int[] _rgdxaColumnWidthSpacing;
|
||||
byte _dmOrientFirst;
|
||||
byte[] _olstAnn;
|
||||
|
||||
|
||||
|
||||
public SEP()
|
||||
{
|
||||
_bkc = 2;
|
||||
_dyaPgn = 720;
|
||||
_dxaPgn = 720;
|
||||
_fEndNote = true;
|
||||
_fEvenlySpaced = true;
|
||||
_xaPage = 12240;
|
||||
_yaPage = 15840;
|
||||
_dyaHdrTop = 720;
|
||||
_dyaHdrBottom = 720;
|
||||
_dmOrientPage = 1;
|
||||
_dxaColumns = 720;
|
||||
_dyaTop = 1440;
|
||||
_dxaLeft = 1800;
|
||||
_dyaBottom = 1440;
|
||||
_dxaRight = 1800;
|
||||
_pgnStart = 1;
|
||||
|
||||
}
|
||||
}
|
@@ -1,132 +0,0 @@
package org.apache.poi.hdf.extractor;
|
||||
|
||||
|
||||
/**
|
||||
* A style definition (STD) read from the stylesheet.
|
||||
*
|
||||
* @author Ryan Ackley
|
||||
*/
|
||||
@Deprecated
|
||||
public final class StyleDescription
|
||||
{
|
||||
|
||||
private static int PARAGRAPH_STYLE = 1;
|
||||
private static int CHARACTER_STYLE = 2;
|
||||
|
||||
int _baseStyleIndex;
|
||||
int _styleTypeCode;
|
||||
int _numUPX;
|
||||
byte[] _papx;
|
||||
byte[] _chpx;
|
||||
PAP _pap;
|
||||
CHP _chp;
|
||||
|
||||
public StyleDescription()
|
||||
{
|
||||
_pap = new PAP();
|
||||
_chp = new CHP();
|
||||
}
|
||||
public StyleDescription(byte[] std, int baseLength, boolean word9)
|
||||
{
|
||||
int infoShort = Utils.convertBytesToShort(std, 2);
|
||||
_styleTypeCode = (infoShort & 0xf);
|
||||
_baseStyleIndex = (infoShort & 0xfff0) >> 4;
|
||||
|
||||
infoShort = Utils.convertBytesToShort(std, 4);
|
||||
_numUPX = infoShort & 0xf;
|
||||
|
||||
//first byte(s) of variable length section of std is the length of the
|
||||
//style name and aliases string
|
||||
int nameLength = 0;
|
||||
int multiplier = 1;
|
||||
if(word9)
|
||||
{
|
||||
nameLength = Utils.convertBytesToShort(std, baseLength);
|
||||
multiplier = 2;
|
||||
}
|
||||
else
|
||||
{
|
||||
nameLength = std[baseLength];
|
||||
}
|
||||
//2 bytes for length, length then null terminator.
|
||||
int grupxStart = multiplier + ((nameLength + 1) * multiplier) + baseLength;
|
||||
|
||||
int offset = 0;
|
||||
for(int x = 0; x < _numUPX; x++)
|
||||
{
|
||||
int upxSize = Utils.convertBytesToShort(std, grupxStart + offset);
|
||||
if(_styleTypeCode == PARAGRAPH_STYLE)
|
||||
{
|
||||
if(x == 0)
|
||||
{
|
||||
_papx = new byte[upxSize];
|
||||
System.arraycopy(std, grupxStart + offset + 2, _papx, 0, upxSize);
|
||||
}
|
||||
else if(x == 1)
|
||||
{
|
||||
_chpx = new byte[upxSize];
|
||||
System.arraycopy(std, grupxStart + offset + 2, _chpx, 0, upxSize);
|
||||
}
|
||||
}
|
||||
else if(_styleTypeCode == CHARACTER_STYLE && x == 0)
|
||||
{
|
||||
_chpx = new byte[upxSize];
|
||||
System.arraycopy(std, grupxStart + offset + 2, _chpx, 0, upxSize);
|
||||
}
|
||||
|
||||
if((upxSize & 1) == 1) // grpprl sizes are word-aligned, so skip the padding byte when the size is odd
|
||||
{
|
||||
++upxSize;
|
||||
}
|
||||
offset += 2 + upxSize;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
public int getBaseStyle()
|
||||
{
|
||||
return _baseStyleIndex;
|
||||
}
|
||||
public byte[] getCHPX()
|
||||
{
|
||||
return _chpx;
|
||||
}
|
||||
public byte[] getPAPX()
|
||||
{
|
||||
return _papx;
|
||||
}
|
||||
public PAP getPAP()
|
||||
{
|
||||
return _pap;
|
||||
}
|
||||
public CHP getCHP()
|
||||
{
|
||||
return _chp;
|
||||
}
|
||||
public void setPAP(PAP pap)
|
||||
{
|
||||
_pap = pap;
|
||||
}
|
||||
public void setCHP(CHP chp)
|
||||
{
|
||||
_chp = chp;
|
||||
}
|
||||
}
|
File diff suppressed because it is too large
@@ -1,49 +0,0 @@
package org.apache.poi.hdf.extractor;

/**
 * Table properties (TAP) for a Word table row.
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class TAP
{
  short _jc;
  int _dxaGapHalf;
  int _dyaRowHeight;
  boolean _fCantSplit;
  boolean _fTableHeader;
  boolean _fLastRow;
  short _itcMac;
  short[] _rgdxaCenter;
  short[] _brcLeft = new short[2];
  short[] _brcRight = new short[2];
  short[] _brcTop = new short[2];
  short[] _brcBottom = new short[2];
  short[] _brcHorizontal = new short[2];
  short[] _brcVertical = new short[2];

  TC[] _rgtc;

  public TAP()
  {
  }
}
@@ -1,73 +0,0 @@
package org.apache.poi.hdf.extractor;

/**
 * Table cell descriptor (TC).
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class TC
{
  boolean _fFirstMerged;
  boolean _fMerged;
  boolean _fVertical;
  boolean _fBackward;
  boolean _fRotateFont;
  boolean _fVertMerge;
  boolean _fVertRestart;
  short _vertAlign;
  short[] _brcTop = new short[2];
  short[] _brcLeft = new short[2];
  short[] _brcBottom = new short[2];
  short[] _brcRight = new short[2];

  public TC()
  {
  }
  static TC convertBytesToTC(byte[] array, int offset)
  {
    TC tc = new TC();
    int rgf = Utils.convertBytesToShort(array, offset);
    tc._fFirstMerged = (rgf & 0x0001) > 0;
    tc._fMerged = (rgf & 0x0002) > 0;
    tc._fVertical = (rgf & 0x0004) > 0;
    tc._fBackward = (rgf & 0x0008) > 0;
    tc._fRotateFont = (rgf & 0x0010) > 0;
    tc._fVertMerge = (rgf & 0x0020) > 0;
    tc._fVertRestart = (rgf & 0x0040) > 0;
    tc._vertAlign = (short)((rgf & 0x0180) >> 7);

    tc._brcTop[0] = Utils.convertBytesToShort(array, offset + 4);
    tc._brcTop[1] = Utils.convertBytesToShort(array, offset + 6);

    tc._brcLeft[0] = Utils.convertBytesToShort(array, offset + 8);
    tc._brcLeft[1] = Utils.convertBytesToShort(array, offset + 10);

    tc._brcBottom[0] = Utils.convertBytesToShort(array, offset + 12);
    tc._brcBottom[1] = Utils.convertBytesToShort(array, offset + 14);

    tc._brcRight[0] = Utils.convertBytesToShort(array, offset + 16);
    tc._brcRight[1] = Utils.convertBytesToShort(array, offset + 18);

    return tc;
  }
}
@@ -1,46 +0,0 @@
package org.apache.poi.hdf.extractor;

import java.util.ArrayList;

/**
 * A table row: its cells and its table properties (TAP).
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class TableRow
{
  TAP _descriptor;
  ArrayList<String> _cells;

  public TableRow(ArrayList<String> cells, TAP descriptor)
  {
    _cells = cells;
    _descriptor = descriptor;
  }
  public TAP getTAP()
  {
    return _descriptor;
  }
  public ArrayList<String> getCells()
  {
    return _cells;
  }
}
@@ -1,51 +0,0 @@
package org.apache.poi.hdf.extractor;

import org.apache.poi.hdf.extractor.util.*;

/**
 * A piece of document text from the piece table, with its character offsets
 * and encoding.
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class TextPiece extends PropertyNode implements Comparable
{
  private boolean _usesUnicode;
  private int _length;

  public TextPiece(int start, int length, boolean unicode)
  {
    super(start, start + length, null);
    _usesUnicode = unicode;
    _length = length;
    //_fcStart = start;
    //_fcEnd = start + length;
  }
  public boolean usesUnicode()
  {
    return _usesUnicode;
  }

  public int compareTo(Object obj) {
    return 0;
  }
}
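// Note: compareTo above always returns 0, so the ordering is meaningless and inconsistent with
// equals. A hedged sketch of what a consistent implementation might look like, assuming
// PropertyNode exposes its start offset via a getStart() accessor (hypothetical name, not
// confirmed by this diff):
//
//   public int compareTo(Object obj) {
//     return Integer.compare(getStart(), ((TextPiece) obj).getStart());
//   }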
@@ -1,60 +0,0 @@
package org.apache.poi.hdf.extractor;

/**
 * Little-endian byte conversion helpers.
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class Utils
{
  public static short convertBytesToShort(byte firstByte, byte secondByte)
  {
    return (short)convertBytesToInt((byte)0, (byte)0, firstByte, secondByte);
  }
  public static int convertBytesToInt(byte firstByte, byte secondByte,
                                      byte thirdByte, byte fourthByte)
  {
    int firstInt = 0xff & firstByte;
    int secondInt = 0xff & secondByte;
    int thirdInt = 0xff & thirdByte;
    int fourthInt = 0xff & fourthByte;

    return (firstInt << 24) | (secondInt << 16) | (thirdInt << 8) | fourthInt;
  }
  public static short convertBytesToShort(byte[] array, int offset)
  {
    return convertBytesToShort(array[offset + 1], array[offset]);
  }
  public static int convertBytesToInt(byte[] array, int offset)
  {
    return convertBytesToInt(array[offset + 3], array[offset + 2], array[offset + 1], array[offset]);
  }
  public static int convertUnsignedByteToInt(byte b)
  {
    return (0xff & b);
  }
  public static char getUnicodeCharacter(byte[] array, int offset)
  {
    return (char)convertBytesToShort(array, offset);
  }
}
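// The converters above read little-endian values: the byte at the lowest offset becomes the
// least significant byte of the result. A small illustration (not part of the original file):
//
//   byte[] data = { 0x78, 0x56, 0x34, 0x12 };
//   int value = Utils.convertBytesToInt(data, 0);     // 0x12345678
//   short low = Utils.convertBytesToShort(data, 0);   // 0x5678
//   char ch   = Utils.getUnicodeCharacter(data, 2);   // '\u1234'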
File diff suppressed because it is too large
@@ -1,40 +0,0 @@
package org.apache.poi.hdf.extractor.data;

/**
 * Document properties (DOP).
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class DOP
{
  public boolean _fFacingPages;
  public int _fpc;
  public int _epc;
  public int _rncFtn;
  public int _nFtn;
  public int _rncEdn;
  public int _nEdn;

  public DOP()
  {
  }
}
@@ -1,36 +0,0 @@
package org.apache.poi.hdf.extractor.data;

/**
 * List Format Override (LFO).
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class LFO
{
  int _lsid;
  int _clfolvl;
  LFOLVL[] _levels;

  public LFO()
  {
  }
}
@@ -1,37 +0,0 @@
package org.apache.poi.hdf.extractor.data;

/**
 * List Format Override Level (LFOLVL).
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class LFOLVL
{
  int _iStartAt;
  int _ilvl;
  boolean _fStartAt;
  boolean _fFormatting;
  LVL _override;

  public LFOLVL()
  {
  }
}
@@ -1,37 +0,0 @@
package org.apache.poi.hdf.extractor.data;

/**
 * List data (LSTF) and its levels.
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class LST
{
  int _lsid;
  int _tplc;
  byte[] _rgistd = new byte[18];
  boolean _fSimpleList;
  LVL[] _levels;

  public LST()
  {
  }
}
@@ -1,63 +0,0 @@
package org.apache.poi.hdf.extractor.data;

/**
 * List level (LVL) formatting.
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class LVL
{
  public int _iStartAt;
  public byte _nfc;
  byte _jc;
  boolean _fLegal;
  boolean _fNoRestart;
  boolean _fPrev;
  boolean _fPrevSpace;
  boolean _fWord6;
  public byte[] _rgbxchNums = new byte[9];
  public byte _ixchFollow;
  public byte[] _chpx;
  public byte[] _papx;
  public char[] _xst;
  public short _istd;

  //byte _cbGrpprlChpx;
  //byte _cbGrpprlPapx;

  public LVL()
  {
  }
  public Object clone()
  {
    LVL obj = null;
    try
    {
      // note: LVL does not implement Cloneable, so super.clone() throws and this method returns null
      obj = (LVL)super.clone();
    }
    catch(Exception e)
    {
      e.printStackTrace();
    }
    return obj;
  }
}
@@ -1,183 +0,0 @@
package org.apache.poi.hdf.extractor.data;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
import org.apache.poi.hdf.extractor.*;
|
||||
|
||||
|
||||
/**
|
||||
* Parses the list tables (plcflst and plflfo) from a Word document's table stream.
|
||||
*
|
||||
* @author Ryan Ackley
|
||||
*/
|
||||
@Deprecated
|
||||
public final class ListTables
|
||||
{
|
||||
|
||||
LFO[] _pllfo;
|
||||
Hashtable _lists = new Hashtable();
|
||||
|
||||
public ListTables(byte[] plcflst, byte[] plflfo)
|
||||
{
|
||||
initLST(plcflst);
|
||||
initLFO(plflfo);
|
||||
}
|
||||
public LVL getLevel(int list, int level)
|
||||
{
|
||||
|
||||
LFO override = _pllfo[list - 1];
|
||||
|
||||
for(int x = 0; x < override._clfolvl; x++)
|
||||
{
|
||||
if(override._levels[x]._ilvl == level)
|
||||
{
|
||||
LFOLVL lfolvl = override._levels[x];
|
||||
if(lfolvl._fFormatting)
|
||||
{
|
||||
LST lst = (LST)_lists.get(Integer.valueOf(override._lsid));
|
||||
LVL lvl = lfolvl._override;
|
||||
lvl._istd = Utils.convertBytesToShort(lst._rgistd, level * 2);
|
||||
return lvl;
|
||||
}
|
||||
else if(lfolvl._fStartAt)
|
||||
{
|
||||
LST lst = (LST)_lists.get(Integer.valueOf(override._lsid));
|
||||
LVL lvl = lst._levels[level];
|
||||
LVL newLvl = (LVL)lvl.clone();
|
||||
newLvl._istd = Utils.convertBytesToShort(lst._rgistd, level * 2);
|
||||
newLvl._iStartAt = lfolvl._iStartAt;
|
||||
return newLvl;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
LST lst = (LST)_lists.get(Integer.valueOf(override._lsid));
|
||||
LVL lvl = lst._levels[level];
|
||||
lvl._istd = Utils.convertBytesToShort(lst._rgistd, level * 2);
|
||||
return lvl;
|
||||
|
||||
|
||||
}
|
||||
private void initLST(byte[] plcflst)
|
||||
{
|
||||
short length = Utils.convertBytesToShort(plcflst, 0);
|
||||
int nextLevelOffset = 0;
|
||||
//LST[] lstArray = new LST[length];
|
||||
for(int x = 0; x < length; x++)
|
||||
{
|
||||
LST lst = new LST();
|
||||
lst._lsid = Utils.convertBytesToInt(plcflst, 2 + (x * 28));
|
||||
lst._tplc = Utils.convertBytesToInt(plcflst, 2 + 4 + (x * 28));
|
||||
System.arraycopy(plcflst, 2 + 8 + (x * 28), lst._rgistd, 0, 18);
|
||||
byte code = plcflst[2 + 26 + (x * 28)];
|
||||
lst._fSimpleList = StyleSheet.getFlag(code & 0x01);
|
||||
//lstArray[x] = lst;
|
||||
_lists.put(Integer.valueOf(lst._lsid), lst);
|
||||
|
||||
if(lst._fSimpleList)
|
||||
{
|
||||
lst._levels = new LVL[1];
|
||||
}
|
||||
else
|
||||
{
|
||||
lst._levels = new LVL[9];
|
||||
}
|
||||
|
||||
for(int y = 0; y < lst._levels.length; y++)
|
||||
{
|
||||
int offset = 2 + (length * 28) + nextLevelOffset;
|
||||
lst._levels[y] = new LVL();
|
||||
nextLevelOffset += createLVL(plcflst, offset, lst._levels[y]);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
private void initLFO(byte[] plflfo)
|
||||
{
|
||||
int lfoSize = Utils.convertBytesToInt(plflfo, 0);
|
||||
_pllfo = new LFO[lfoSize];
|
||||
for(int x = 0; x < lfoSize; x++)
|
||||
{
|
||||
LFO nextLFO = new LFO();
|
||||
nextLFO._lsid = Utils.convertBytesToInt(plflfo, 4 + (x * 16));
|
||||
nextLFO._clfolvl = plflfo[4 + 12 + (x * 16)];
|
||||
nextLFO._levels = new LFOLVL[nextLFO._clfolvl];
|
||||
_pllfo[x] = nextLFO;
|
||||
}
|
||||
|
||||
int lfolvlOffset = (lfoSize * 16) + 4;
|
||||
int lvlOffset = 0;
|
||||
int lfolvlNum = 0;
|
||||
for(int x = 0; x < lfoSize; x++)
|
||||
{
|
||||
for(int y = 0; y < _pllfo[x]._clfolvl; y++)
|
||||
{
|
||||
int offset = lfolvlOffset + (lfolvlNum * 8) + lvlOffset;
|
||||
LFOLVL lfolvl = new LFOLVL();
|
||||
lfolvl._iStartAt = Utils.convertBytesToInt(plflfo, offset);
|
||||
lfolvl._ilvl = Utils.convertBytesToInt(plflfo, offset + 4);
|
||||
lfolvl._fStartAt = StyleSheet.getFlag(lfolvl._ilvl & 0x10);
|
||||
lfolvl._fFormatting = StyleSheet.getFlag(lfolvl._ilvl & 0x20);
|
||||
lfolvl._ilvl = (lfolvl._ilvl & (byte)0x0f);
|
||||
lfolvlNum++;
|
||||
|
||||
if(lfolvl._fFormatting)
|
||||
{
|
||||
offset = lfolvlOffset + (lfolvlNum * 12) + lvlOffset;
|
||||
lfolvl._override = new LVL();
|
||||
lvlOffset += createLVL(plflfo, offset, lfolvl._override);
|
||||
}
|
||||
_pllfo[x]._levels[y] = lfolvl;
|
||||
}
|
||||
}
|
||||
}
|
||||
private int createLVL(byte[] data, int offset, LVL lvl)
|
||||
{
|
||||
|
||||
lvl._iStartAt = Utils.convertBytesToInt(data, offset);
|
||||
lvl._nfc = data[offset + 4];
|
||||
int code = Utils.convertBytesToInt(data, offset + 5);
|
||||
lvl._jc = (byte)(code & 0x03);
|
||||
lvl._fLegal = StyleSheet.getFlag(code & 0x04);
|
||||
lvl._fNoRestart = StyleSheet.getFlag(code & 0x08);
|
||||
lvl._fPrev = StyleSheet.getFlag(code & 0x10);
|
||||
lvl._fPrevSpace = StyleSheet.getFlag(code & 0x20);
|
||||
lvl._fWord6 = StyleSheet.getFlag(code & 0x40);
|
||||
System.arraycopy(data, offset + 6, lvl._rgbxchNums, 0, 9);
|
||||
lvl._ixchFollow = data[offset + 15];
|
||||
int chpxSize = data[offset + 24];
|
||||
int papxSize = data[offset + 25];
|
||||
lvl._chpx = new byte[chpxSize];
|
||||
lvl._papx = new byte[papxSize];
|
||||
System.arraycopy(data, offset + 28, lvl._papx, 0, papxSize);
|
||||
System.arraycopy(data, offset + 28 + papxSize, lvl._chpx, 0, chpxSize);
|
||||
offset += 28 + papxSize + chpxSize;//modify offset
|
||||
int xstSize = Utils.convertBytesToShort(data, offset);
|
||||
lvl._xst = new char[xstSize];
|
||||
|
||||
offset += 2;
|
||||
for(int x = 0; x < xstSize; x++)
|
||||
{
|
||||
lvl._xst[x] = (char)Utils.convertBytesToShort(data, offset + (x * 2));
|
||||
}
|
||||
return 28 + papxSize + chpxSize + 2 + (xstSize * 2);
|
||||
}
|
||||
}
|
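// Resolving a paragraph's numbering ties these structures together: the paragraph's PAP carries
// an ilfo (a 1-based index into the LFO table) and an ilvl (the list level), and getLevel applies
// any LFOLVL override before falling back to the LST's own level. A hedged usage sketch
// (variable names are illustrative, not taken from this diff):
//
//   ListTables listTables = new ListTables(plcflst, plflfo); // both byte arrays come from the table stream
//   LVL lvl = listTables.getLevel(ilfo, ilvl);               // ilfo/ilvl taken from the paragraph's PAP
//   int startAt = lvl._iStartAt;                             // first number used for that list level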
@@ -1,691 +0,0 @@
package org.apache.poi.hdf.extractor.util;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
|
||||
/*
 * A B-Tree-like implementation of the java.util.Set interface. This is a modifiable set
 * and thus allows elements to be added and removed. An instance of java.util.Comparator
 * must be provided at construction, else all Objects added to the set must implement
 * java.util.Comparable and must be comparable to one another. No duplicate elements
 * are allowed in any BTreeSet, in accordance with the specification of the Set interface.
 * Any attempt to add a null element will result in an IllegalArgumentException being thrown.
 * The java.util.Iterator returned by the iterator method guarantees that the elements
 * are returned in ascending order. The Iterator.remove() method is supported.
 *
 * @author Ryan Ackley
 */
|
||||
@Deprecated
|
||||
public final class BTreeSet extends AbstractSet implements Set {
|
||||
|
||||
/*
|
||||
* Instance Variables
|
||||
*/
|
||||
public BTreeNode root;
|
||||
private Comparator comparator = null;
|
||||
int order;
|
||||
int size = 0;
|
||||
|
||||
/*
 * Constructors
 * A no-arg constructor is supported in accordance with the specification of the
 * java.util.Collection interface. If the order for the B-Tree is not specified
 * at construction it defaults to 6.
 */
|
||||
|
||||
public BTreeSet() {
|
||||
this(6); // Default order for a BTreeSet is 6
|
||||
}
|
||||
|
||||
public BTreeSet(Collection c) {
|
||||
this(6); // Default order for a BTreeSet is 6
|
||||
addAll(c);
|
||||
}
|
||||
|
||||
public BTreeSet(int order) {
|
||||
this(order, null);
|
||||
}
|
||||
|
||||
public BTreeSet(int order, Comparator comparator) {
|
||||
this.order = order;
|
||||
this.comparator = comparator;
|
||||
root = new BTreeNode(null);
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* Public Methods
|
||||
*/
|
||||
public boolean add(Object x) throws IllegalArgumentException {
|
||||
if (x == null) throw new IllegalArgumentException();
|
||||
return root.insert(x, -1);
|
||||
}
|
||||
|
||||
public boolean contains(Object x) {
|
||||
return root.includes(x);
|
||||
}
|
||||
|
||||
public boolean remove(Object x) {
|
||||
if (x == null) return false;
|
||||
return root.delete(x, -1);
|
||||
}
|
||||
|
||||
public int size() {
|
||||
return size;
|
||||
}
|
||||
|
||||
public void clear() {
|
||||
root = new BTreeNode(null);
|
||||
size = 0;
|
||||
}
|
||||
|
||||
public java.util.Iterator iterator() {
|
||||
return new BTIterator();
|
||||
}
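/*
 * A minimal usage sketch of the public methods above (illustrative only, not part of the
 * original class). With no Comparator supplied the elements must be mutually Comparable,
 * null elements are rejected, and iteration is in ascending order:
 *
 *   BTreeSet set = new BTreeSet();
 *   set.add(Integer.valueOf(42));
 *   set.add(Integer.valueOf(7));
 *   boolean present = set.contains(Integer.valueOf(7));      // true
 *   for (java.util.Iterator it = set.iterator(); it.hasNext();) {
 *       System.out.println(it.next());                       // prints 7, then 42
 *   }
 */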
|
||||
|
||||
|
||||
/*
|
||||
* Private methods
|
||||
*/
|
||||
int compare(Object x, Object y) {
|
||||
return (comparator == null ? ((Comparable)x).compareTo(y) : comparator.compare(x, y));
|
||||
}
|
||||
|
||||
|
||||
|
||||
/*
|
||||
* Inner Classes
|
||||
*/
|
||||
|
||||
/*
 * Guarantees that the Objects are returned in ascending order. Due to the volatile
 * structure of a B-Tree (many splits, steals and merges can happen in a single call to remove)
 * this Iterator does not attempt to track any concurrent changes that are happening to
 * its BTreeSet. Therefore, after every call to BTreeSet.remove or BTreeSet.add a new
 * Iterator should be constructed. If no new Iterator is constructed then there is a
 * chance of receiving a NullPointerException. The Iterator.remove method is supported.
 */
|
||||
|
||||
private final class BTIterator implements java.util.Iterator {
|
||||
private int index = 0;
|
||||
Stack parentIndex = new Stack(); // Contains all parentIndicies for currentNode
|
||||
private Object lastReturned = null;
|
||||
private Object next;
|
||||
BTreeNode currentNode;
|
||||
|
||||
BTIterator() {
|
||||
currentNode = firstNode();
|
||||
next = nextElement();
|
||||
}
|
||||
|
||||
public boolean hasNext() {
|
||||
return next != null;
|
||||
}
|
||||
|
||||
public Object next() {
|
||||
if (next == null) throw new NoSuchElementException();
|
||||
|
||||
lastReturned = next;
|
||||
next = nextElement();
|
||||
return lastReturned;
|
||||
}
|
||||
|
||||
public void remove() {
|
||||
if (lastReturned == null) throw new NoSuchElementException();
|
||||
|
||||
BTreeSet.this.remove(lastReturned);
|
||||
lastReturned = null;
|
||||
}
|
||||
|
||||
private BTreeNode firstNode() {
|
||||
BTreeNode temp = BTreeSet.this.root;
|
||||
|
||||
while (temp._entries[0].child != null) {
|
||||
temp = temp._entries[0].child;
|
||||
parentIndex.push(Integer.valueOf(0));
|
||||
}
|
||||
|
||||
return temp;
|
||||
}
|
||||
|
||||
private Object nextElement() {
|
||||
if (currentNode.isLeaf()) {
|
||||
if (index < currentNode._nrElements) return currentNode._entries[index++].element;
|
||||
|
||||
else if (!parentIndex.empty()) { //All elements have been returned, return successor of lastReturned if it exists
|
||||
currentNode = currentNode._parent;
|
||||
index = ((Integer)parentIndex.pop()).intValue();
|
||||
|
||||
while (index == currentNode._nrElements) {
|
||||
if (parentIndex.empty()) break;
|
||||
currentNode = currentNode._parent;
|
||||
index = ((Integer)parentIndex.pop()).intValue();
|
||||
}
|
||||
|
||||
if (index == currentNode._nrElements) return null; //Reached root and he has no more children
|
||||
return currentNode._entries[index++].element;
|
||||
}
|
||||
|
||||
else { // You're a leaf and the root
|
||||
if (index == currentNode._nrElements) return null;
|
||||
return currentNode._entries[index++].element;
|
||||
}
|
||||
}
|
||||
|
||||
// else - You're not a leaf so simply find and return the successor of lastReturned
|
||||
currentNode = currentNode._entries[index].child;
|
||||
parentIndex.push(Integer.valueOf(index));
|
||||
|
||||
while (currentNode._entries[0].child != null) {
|
||||
currentNode = currentNode._entries[0].child;
|
||||
parentIndex.push(Integer.valueOf(0));
|
||||
}
|
||||
|
||||
index = 1;
|
||||
return currentNode._entries[0].element;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static class Entry {
|
||||
|
||||
public Object element;
|
||||
public BTreeNode child;
|
||||
}
|
||||
|
||||
|
||||
public class BTreeNode {
|
||||
|
||||
public Entry[] _entries;
|
||||
public BTreeNode _parent;
|
||||
int _nrElements = 0;
|
||||
private final int MIN = (BTreeSet.this.order - 1) / 2;
|
||||
|
||||
BTreeNode(BTreeNode parent) {
|
||||
_parent = parent;
|
||||
_entries = new Entry[BTreeSet.this.order];
|
||||
_entries[0] = new Entry();
|
||||
}
|
||||
|
||||
boolean insert(Object x, int parentIndex) {
|
||||
if (isFull()) { // If full, you must split and promote splitNode before inserting
|
||||
Object splitNode = _entries[_nrElements / 2].element;
|
||||
BTreeNode rightSibling = split();
|
||||
|
||||
if (isRoot()) { // Grow a level
|
||||
splitRoot(splitNode, this, rightSibling);
|
||||
// Determine where to insert
|
||||
if (BTreeSet.this.compare(x, BTreeSet.this.root._entries[0].element) < 0) insert(x, 0);
|
||||
else rightSibling.insert(x, 1);
|
||||
}
|
||||
|
||||
else { // Promote splitNode
|
||||
_parent.insertSplitNode(splitNode, this, rightSibling, parentIndex);
|
||||
if (BTreeSet.this.compare(x, _parent._entries[parentIndex].element) < 0) {
|
||||
return insert(x, parentIndex);
|
||||
}
|
||||
return rightSibling.insert(x, parentIndex + 1);
|
||||
}
|
||||
}
|
||||
|
||||
else if (isLeaf()) { // If leaf, simply insert the non-duplicate element
|
||||
int insertAt = childToInsertAt(x, true);
|
||||
if (insertAt == -1) {
|
||||
return false; // Determine if the element already exists
|
||||
}
|
||||
insertNewElement(x, insertAt);
|
||||
BTreeSet.this.size++;
|
||||
return true;
|
||||
}
|
||||
|
||||
else { // If not full and not leaf recursively find correct node to insert at
|
||||
int insertAt = childToInsertAt(x, true);
|
||||
return (insertAt == -1 ? false : _entries[insertAt].child.insert(x, insertAt));
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
boolean includes(Object x) {
|
||||
int index = childToInsertAt(x, true);
|
||||
if (index == -1) return true;
|
||||
if (_entries[index] == null || _entries[index].child == null) return false;
|
||||
return _entries[index].child.includes(x);
|
||||
}
|
||||
|
||||
boolean delete(Object x, int parentIndex) {
|
||||
int i = childToInsertAt(x, true);
|
||||
int priorParentIndex = parentIndex;
|
||||
BTreeNode temp = this;
|
||||
if (i != -1) {
|
||||
do {
|
||||
if (temp._entries[i] == null || temp._entries[i].child == null) return false;
|
||||
temp = temp._entries[i].child;
|
||||
priorParentIndex = parentIndex;
|
||||
parentIndex = i;
|
||||
i = temp.childToInsertAt(x, true);
|
||||
} while (i != -1);
|
||||
} // Now temp contains element to delete and temp's parentIndex is parentIndex
|
||||
|
||||
if (temp.isLeaf()) { // If leaf and have more than MIN elements, simply delete
|
||||
if (temp._nrElements > MIN) {
|
||||
temp.deleteElement(x);
|
||||
BTreeSet.this.size--;
|
||||
return true;
|
||||
}
|
||||
|
||||
// else If leaf and have less than MIN elements, than prepare the BTreeSet for deletion
|
||||
temp.prepareForDeletion(parentIndex);
|
||||
temp.deleteElement(x);
|
||||
BTreeSet.this.size--;
|
||||
temp.fixAfterDeletion(priorParentIndex);
|
||||
return true;
|
||||
}
|
||||
|
||||
// else Only delete at leaf so first switch with successor than delete
|
||||
temp.switchWithSuccessor(x);
|
||||
parentIndex = temp.childToInsertAt(x, false) + 1;
|
||||
return temp._entries[parentIndex].child.delete(x, parentIndex);
|
||||
}
|
||||
|
||||
|
||||
private boolean isFull() { return _nrElements == (BTreeSet.this.order - 1); }
|
||||
|
||||
boolean isLeaf() { return _entries[0].child == null; }
|
||||
|
||||
private boolean isRoot() { return _parent == null; }
|
||||
|
||||
/*
|
||||
* Splits a BTreeNode into two BTreeNodes, removing the splitNode from the
|
||||
* calling BTreeNode.
|
||||
*/
|
||||
private BTreeNode split() {
|
||||
BTreeNode rightSibling = new BTreeNode(_parent);
|
||||
int index = _nrElements / 2;
|
||||
_entries[index++].element = null;
|
||||
|
||||
for (int i = 0, nr = _nrElements; index <= nr; i++, index++) {
|
||||
rightSibling._entries[i] = _entries[index];
|
||||
if (rightSibling._entries[i] != null && rightSibling._entries[i].child != null)
|
||||
rightSibling._entries[i].child._parent = rightSibling;
|
||||
_entries[index] = null;
|
||||
_nrElements--;
|
||||
rightSibling._nrElements++;
|
||||
}
|
||||
|
||||
rightSibling._nrElements--; // Need to correct for copying the last Entry which has a null element and a child
|
||||
return rightSibling;
|
||||
}
|
||||
|
||||
/*
|
||||
* Creates a new BTreeSet.root which contains only the splitNode and pointers
|
||||
* to it's left and right child.
|
||||
*/
|
||||
private void splitRoot(Object splitNode, BTreeNode left, BTreeNode right) {
|
||||
BTreeNode newRoot = new BTreeNode(null);
|
||||
newRoot._entries[0].element = splitNode;
|
||||
newRoot._entries[0].child = left;
|
||||
newRoot._entries[1] = new Entry();
|
||||
newRoot._entries[1].child = right;
|
||||
newRoot._nrElements = 1;
|
||||
left._parent = right._parent = newRoot;
|
||||
BTreeSet.this.root = newRoot;
|
||||
}
|
||||
|
||||
private void insertSplitNode(Object splitNode, BTreeNode left, BTreeNode right, int insertAt) {
|
||||
for (int i = _nrElements; i >= insertAt; i--) _entries[i + 1] = _entries[i];
|
||||
|
||||
_entries[insertAt] = new Entry();
|
||||
_entries[insertAt].element = splitNode;
|
||||
_entries[insertAt].child = left;
|
||||
_entries[insertAt + 1].child = right;
|
||||
|
||||
_nrElements++;
|
||||
}
|
||||
|
||||
private void insertNewElement(Object x, int insertAt) {
|
||||
|
||||
for (int i = _nrElements; i > insertAt; i--) _entries[i] = _entries[i - 1];
|
||||
|
||||
_entries[insertAt] = new Entry();
|
||||
_entries[insertAt].element = x;
|
||||
|
||||
_nrElements++;
|
||||
}
|
||||
|
||||
/*
 * Possibly a deceptive name for a pretty cool method. Uses binary search
 * to determine the position in entries[] at which to traverse to find the
 * correct BTreeNode in which to insert a new element. When the parameter
 * position is true and the element is already present in the calling
 * BTreeNode, -1 is returned; when position is false and the element is
 * contained in the calling BTreeNode, the position of the element in
 * entries[] is returned.
 */
|
||||
private int childToInsertAt(Object x, boolean position) {
|
||||
int index = _nrElements / 2;
|
||||
|
||||
if (_entries[index] == null || _entries[index].element == null) return index;
|
||||
|
||||
int lo = 0, hi = _nrElements - 1;
|
||||
while (lo <= hi) {
|
||||
if (BTreeSet.this.compare(x, _entries[index].element) > 0) {
|
||||
lo = index + 1;
|
||||
index = (hi + lo) / 2;
|
||||
}
|
||||
else {
|
||||
hi = index - 1;
|
||||
index = (hi + lo) / 2;
|
||||
}
|
||||
}
|
||||
|
||||
hi++;
|
||||
if (_entries[hi] == null || _entries[hi].element == null) return hi;
|
||||
return (!position ? hi : BTreeSet.this.compare(x, _entries[hi].element) == 0 ? -1 : hi);
|
||||
}
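
childToInsertAt is essentially a binary search for an insertion slot. A standalone sketch of the same idea, using a plain sorted int array instead of the Entry[] above; the method name and parameters here are illustrative only, not part of the removed API:

// Illustrative only: the insertion-slot search on a plain sorted array.
// Returns -1 when x is already present, otherwise the slot to descend into.
static int insertionSlot(int[] sorted, int x) {
    int lo = 0, hi = sorted.length - 1;
    while (lo <= hi) {
        int mid = (lo + hi) / 2;
        if (sorted[mid] == x) return -1;   // duplicate, mirrors position == true
        if (sorted[mid] < x) lo = mid + 1;
        else hi = mid - 1;
    }
    return lo;                             // first index whose element is >= x
}
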
private void deleteElement(Object x) {
|
||||
int index = childToInsertAt(x, false);
|
||||
for (; index < (_nrElements - 1); index++) _entries[index] = _entries[index + 1];
|
||||
|
||||
if (_nrElements == 1) _entries[index] = new Entry(); // This is root and it is empty
|
||||
else _entries[index] = null;
|
||||
|
||||
_nrElements--;
|
||||
}
|
||||
|
||||
private void prepareForDeletion(int parentIndex) {
|
||||
if (isRoot()) return; // Don't attempt to steal or merge if you're the root
|
||||
|
||||
// If not root then try to steal left
|
||||
else if (parentIndex != 0 && _parent._entries[parentIndex - 1].child._nrElements > MIN) {
|
||||
stealLeft(parentIndex);
|
||||
return;
|
||||
}
|
||||
|
||||
// If not root and can't steal left try to steal right
|
||||
else if (parentIndex < _entries.length && _parent._entries[parentIndex + 1] != null && _parent._entries[parentIndex + 1].child != null && _parent._entries[parentIndex + 1].child._nrElements > MIN) {
|
||||
stealRight(parentIndex);
|
||||
return;
|
||||
}
|
||||
|
||||
// If not root and can't steal left or right then try to merge left
|
||||
else if (parentIndex != 0) {
|
||||
mergeLeft(parentIndex);
|
||||
return;
|
||||
}
|
||||
|
||||
// If not root and can't steal left or right and can't merge left you must be able to merge right
|
||||
else mergeRight(parentIndex);
|
||||
}
|
||||
|
||||
private void fixAfterDeletion(int parentIndex) {
|
||||
if (isRoot() || _parent.isRoot()) return; // No fixing needed
|
||||
|
||||
if (_parent._nrElements < MIN) { // If the parent lost its n/2 element, repair it
|
||||
BTreeNode temp = _parent;
|
||||
temp.prepareForDeletion(parentIndex);
|
||||
if (temp._parent == null) return; // Root changed
|
||||
if (!temp._parent.isRoot() && temp._parent._nrElements < MIN) { // If need be recurse
|
||||
BTreeNode x = temp._parent._parent;
|
||||
int i = 0;
|
||||
// Find parent's parentIndex
|
||||
for (; i < _entries.length; i++) if (x._entries[i].child == temp._parent) break;
|
||||
temp._parent.fixAfterDeletion(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void switchWithSuccessor(Object x) {
|
||||
int index = childToInsertAt(x, false);
|
||||
BTreeNode temp = _entries[index + 1].child;
|
||||
while (temp._entries[0] != null && temp._entries[0].child != null) temp = temp._entries[0].child;
|
||||
Object successor = temp._entries[0].element;
|
||||
temp._entries[0].element = _entries[index].element;
|
||||
_entries[index].element = successor;
|
||||
}
|
||||
|
||||
/*
|
||||
* This method is called only when the BTreeNode has the minimum number of elements,
|
||||
* has a leftSibling, and the leftSibling has more than the minimum number of elements.
|
||||
*/
|
||||
private void stealLeft(int parentIndex) {
|
||||
BTreeNode p = _parent;
|
||||
BTreeNode ls = _parent._entries[parentIndex - 1].child;
|
||||
|
||||
if (isLeaf()) { // When stealing from leaf to leaf don't worry about children
|
||||
int add = childToInsertAt(p._entries[parentIndex - 1].element, true);
|
||||
insertNewElement(p._entries[parentIndex - 1].element, add);
|
||||
p._entries[parentIndex - 1].element = ls._entries[ls._nrElements - 1].element;
|
||||
ls._entries[ls._nrElements - 1] = null;
|
||||
ls._nrElements--;
|
||||
}
|
||||
|
||||
else { // Was called recursively to fix an undermanned parent
|
||||
_entries[0].element = p._entries[parentIndex - 1].element;
|
||||
p._entries[parentIndex - 1].element = ls._entries[ls._nrElements - 1].element;
|
||||
_entries[0].child = ls._entries[ls._nrElements].child;
|
||||
_entries[0].child._parent = this;
|
||||
ls._entries[ls._nrElements] = null;
|
||||
ls._entries[ls._nrElements - 1].element = null;
|
||||
_nrElements++;
|
||||
ls._nrElements--;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* This method is called only when stealLeft can't be called, the BTreeNode
|
||||
* has the minimum number of elements, has a rightSibling, and the rightSibling
|
||||
* has more than the minimum number of elements.
|
||||
*/
|
||||
private void stealRight(int parentIndex) {
|
||||
BTreeNode p = _parent;
|
||||
BTreeNode rs = p._entries[parentIndex + 1].child;
|
||||
|
||||
if (isLeaf()) { // When stealing from leaf to leaf don't worry about children
|
||||
_entries[_nrElements] = new Entry();
|
||||
_entries[_nrElements].element = p._entries[parentIndex].element;
|
||||
p._entries[parentIndex].element = rs._entries[0].element;
|
||||
for (int i = 0; i < rs._nrElements; i++) rs._entries[i] = rs._entries[i + 1];
|
||||
rs._entries[rs._nrElements - 1] = null;
|
||||
_nrElements++;
|
||||
rs._nrElements--;
|
||||
}
|
||||
|
||||
else { // Was called recursively to fix an undermanned parent
|
||||
for (int i = 0; i <= _nrElements; i++) _entries[i] = _entries[i + 1];
|
||||
_entries[_nrElements].element = p._entries[parentIndex].element;
|
||||
p._entries[parentIndex].element = rs._entries[0].element;
|
||||
_entries[_nrElements + 1] = new Entry();
|
||||
_entries[_nrElements + 1].child = rs._entries[0].child;
|
||||
_entries[_nrElements + 1].child._parent = this;
|
||||
for (int i = 0; i <= rs._nrElements; i++) rs._entries[i] = rs._entries[i + 1];
|
||||
rs._entries[rs._nrElements] = null;
|
||||
_nrElements++;
|
||||
rs._nrElements--;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* This method is called only when stealLeft and stealRight could not be called,
|
||||
* the BTreeNode has the minimum number of elements, has a leftSibling, and the
|
||||
* leftSibling has more than the minimum number of elements. If after completion
|
||||
* parent has fewer than the minimum number of elements than the parents entries[0]
|
||||
* slot is left empty in anticipation of a recursive call to stealLeft, stealRight,
|
||||
* mergeLeft, or mergeRight to fix the parent. All of the before-mentioned methods
|
||||
* expect the parent to be in such a condition.
|
||||
*/
|
||||
private void mergeLeft(int parentIndex) {
|
||||
BTreeNode p = _parent;
|
||||
BTreeNode ls = p._entries[parentIndex - 1].child;
|
||||
|
||||
if (isLeaf()) { // Don't worry about children
|
||||
int add = childToInsertAt(p._entries[parentIndex - 1].element, true);
|
||||
insertNewElement(p._entries[parentIndex - 1].element, add); // Could have been a successor switch
|
||||
p._entries[parentIndex - 1].element = null;
|
||||
|
||||
for (int i = _nrElements - 1, nr = ls._nrElements; i >= 0; i--)
|
||||
_entries[i + nr] = _entries[i];
|
||||
|
||||
for (int i = ls._nrElements - 1; i >= 0; i--) {
|
||||
_entries[i] = ls._entries[i];
|
||||
_nrElements++;
|
||||
}
|
||||
|
||||
if (p._nrElements == MIN && p != BTreeSet.this.root) {
|
||||
|
||||
for (int x = parentIndex - 1, y = parentIndex - 2; y >= 0; x--, y--)
|
||||
p._entries[x] = p._entries[y];
|
||||
p._entries[0] = new Entry();
|
||||
p._entries[0].child = ls; // So p doesn't think it's a leaf; this will be deleted in the next recursive call
|
||||
}
|
||||
|
||||
else {
|
||||
|
||||
for (int x = parentIndex - 1, y = parentIndex; y <= p._nrElements; x++, y++)
|
||||
p._entries[x] = p._entries[y];
|
||||
p._entries[p._nrElements] = null;
|
||||
}
|
||||
|
||||
p._nrElements--;
|
||||
|
||||
if (p.isRoot() && p._nrElements == 0) { // It's the root and it's empty
|
||||
BTreeSet.this.root = this;
|
||||
_parent = null;
|
||||
}
|
||||
}
|
||||
|
||||
else { // I'm not a leaf but fixing the tree structure
|
||||
_entries[0].element = p._entries[parentIndex - 1].element;
|
||||
_entries[0].child = ls._entries[ls._nrElements].child;
|
||||
_nrElements++;
|
||||
|
||||
for (int x = _nrElements, nr = ls._nrElements; x >= 0; x--)
|
||||
_entries[x + nr] = _entries[x];
|
||||
|
||||
for (int x = ls._nrElements - 1; x >= 0; x--) {
|
||||
_entries[x] = ls._entries[x];
|
||||
_entries[x].child._parent = this;
|
||||
_nrElements++;
|
||||
}
|
||||
|
||||
if (p._nrElements == MIN && p != BTreeSet.this.root) { // Push everything to the right
|
||||
for (int x = parentIndex - 1, y = parentIndex - 2; y >= 0; x--, y--)
    p._entries[x] = p._entries[y];
|
||||
p._entries[0] = new Entry();
|
||||
}
|
||||
|
||||
else { // Either p.nrElements > MIN or p == BTreeSet.this.root so push everything to the left
|
||||
for (int x = parentIndex - 1, y = parentIndex; y <= p._nrElements; x++, y++)
|
||||
p._entries[x] = p._entries[y];
|
||||
p._entries[p._nrElements] = null;
|
||||
}
|
||||
|
||||
p._nrElements--;
|
||||
|
||||
if (p.isRoot() && p._nrElements == 0) { // p == BTreeSet.this.root and it's empty
|
||||
BTreeSet.this.root = this;
|
||||
_parent = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* This method is called only when stealLeft, stealRight, and mergeLeft could not be called,
|
||||
* the BTreeNode has the minimum number of elements, has a rightSibling, and the
|
||||
* rightSibling has more than the minimum number of elements. If after completion
|
||||
* parent has fewer than the minimum number of elements than the parents entries[0]
|
||||
* slot is left empty in anticipation of a recursive call to stealLeft, stealRight,
|
||||
* mergeLeft, or mergeRight to fix the parent. All of the before-mentioned methods
|
||||
* expect the parent to be in such a condition.
|
||||
*/
|
||||
private void mergeRight(int parentIndex) {
|
||||
BTreeNode p = _parent;
|
||||
BTreeNode rs = p._entries[parentIndex + 1].child;
|
||||
|
||||
if (isLeaf()) { // Don't worry about children
|
||||
_entries[_nrElements] = new Entry();
|
||||
_entries[_nrElements].element = p._entries[parentIndex].element;
|
||||
_nrElements++;
|
||||
for (int i = 0, nr = _nrElements; i < rs._nrElements; i++, nr++) {
|
||||
_entries[nr] = rs._entries[i];
|
||||
_nrElements++;
|
||||
}
|
||||
p._entries[parentIndex].element = p._entries[parentIndex + 1].element;
|
||||
if (p._nrElements == MIN && p != BTreeSet.this.root) {
|
||||
for (int x = parentIndex + 1, y = parentIndex; y >= 0; x--, y--)
|
||||
p._entries[x] = p._entries[y];
|
||||
p._entries[0] = new Entry();
|
||||
p._entries[0].child = rs; // So it doesn't think it's a leaf, this child will be deleted in the next recursive call
|
||||
}
|
||||
|
||||
else {
|
||||
for (int x = parentIndex + 1, y = parentIndex + 2; y <= p._nrElements; x++, y++)
|
||||
p._entries[x] = p._entries[y];
|
||||
p._entries[p._nrElements] = null;
|
||||
}
|
||||
|
||||
p._nrElements--;
|
||||
if (p.isRoot() && p._nrElements == 0) { // It's the root and it's empty
|
||||
BTreeSet.this.root = this;
|
||||
_parent = null;
|
||||
}
|
||||
}
|
||||
|
||||
else { // It's not a leaf
|
||||
|
||||
_entries[_nrElements].element = p._entries[parentIndex].element;
|
||||
_nrElements++;
|
||||
|
||||
for (int x = _nrElements + 1, y = 0; y <= rs._nrElements; x++, y++) {
|
||||
_entries[x] = rs._entries[y];
|
||||
rs._entries[y].child._parent = this;
|
||||
_nrElements++;
|
||||
}
|
||||
_nrElements--;
|
||||
|
||||
p._entries[++parentIndex].child = this;
|
||||
|
||||
if (p._nrElements == MIN && p != BTreeSet.this.root) {
|
||||
for (int x = parentIndex - 1, y = parentIndex - 2; y >= 0; x--, y--)
|
||||
p._entries[x] = p._entries[y];
|
||||
p._entries[0] = new Entry();
|
||||
}
|
||||
|
||||
else {
|
||||
for (int x = parentIndex - 1, y = parentIndex; y <= p._nrElements; x++, y++)
|
||||
p._entries[x] = p._entries[y];
|
||||
p._entries[p._nrElements] = null;
|
||||
}
|
||||
|
||||
p._nrElements--;
|
||||
|
||||
if (p.isRoot() && p._nrElements == 0) { // It's the root and it's empty
|
||||
BTreeSet.this.root = this;
|
||||
_parent = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
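
For context on the removed class as a whole: delete() always pushes the physical removal down to a leaf (swapping the element with its in-order successor first) and then repairs any undersized node by stealing from or merging with a sibling. A minimal usage sketch, assuming a POI build that still ships the removed HDF classes; only the no-arg constructor and add(...) are exercised elsewhere in this commit, and BTreeSetSketch is an illustrative name:

// Sketch only, assuming a POI build that still ships the removed HDF classes.
import org.apache.poi.hdf.model.hdftypes.ChpxNode;
import org.apache.poi.hdf.model.util.BTreeSet;

public class BTreeSetSketch {
    public static void main(String[] args) {
        BTreeSet runs = new BTreeSet();                  // as used by HDFObjectModel below
        runs.add(new ChpxNode(0, 10, new byte[0]));      // PropertyNodes compare by their start offset
        runs.add(new ChpxNode(10, 25, new byte[0]));
        // A removal routes through BTreeNode.delete(): the element is first swapped
        // with its in-order successor so the physical delete happens at a leaf, then
        // stealLeft/stealRight/mergeLeft/mergeRight repair any undersized node.
    }
}
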
@ -1,40 +0,0 @@
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.hdf.extractor.util;
|
||||
|
||||
|
||||
/**
|
||||
* Comment me
|
||||
*
|
||||
* @author Ryan Ackley
|
||||
*/
|
||||
@Deprecated
|
||||
public final class ChpxNode extends PropertyNode
|
||||
{
|
||||
|
||||
|
||||
public ChpxNode(int fcStart, int fcEnd, byte[] chpx)
|
||||
{
|
||||
super(fcStart, fcEnd, chpx);
|
||||
}
|
||||
public byte[] getChpx()
|
||||
{
|
||||
return super.getGrpprl();
|
||||
}
|
||||
|
||||
}
|
@ -1,83 +0,0 @@
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.hdf.extractor.util;
|
||||
|
||||
import java.util.Locale;
|
||||
|
||||
/**
|
||||
* TODO Comment me
|
||||
*/
|
||||
@Deprecated
|
||||
public final class NumberFormatter
|
||||
{
|
||||
private final static int ARABIC = 0;
|
||||
private final static int UPPER_ROMAN = 1;
|
||||
private final static int LOWER_ROMAN = 2;
|
||||
private final static int UPPER_LETTER = 3;
|
||||
private final static int LOWER_LETTER = 4;
|
||||
private final static int ORDINAL = 5;
|
||||
|
||||
private static String[] _arabic = new String[] {"1", "2", "3", "4", "5", "6",
|
||||
"7", "8", "9", "10", "11", "12",
|
||||
"13", "14", "15", "16", "17", "18",
|
||||
"19", "20", "21", "22", "23",
|
||||
"24", "25", "26", "27", "28",
|
||||
"29", "30", "31", "32", "33",
|
||||
"34", "35", "36", "37", "38",
|
||||
"39", "40", "41", "42", "43",
|
||||
"44", "45", "46", "47", "48",
|
||||
"49", "50", "51", "52", "53"};
|
||||
private static String[] _roman = new String[]{"i", "ii", "iii", "iv", "v", "vi",
|
||||
"vii", "viii", "ix", "x", "xi", "xii",
|
||||
"xiii","xiv", "xv", "xvi", "xvii",
|
||||
"xviii", "xix", "xx", "xxi", "xxii",
|
||||
"xxiii", "xxiv", "xxv", "xxvi",
|
||||
"xxvii", "xxviii", "xxix", "xxx",
|
||||
"xxxi", "xxxii", "xxxiii", "xxxiv",
|
||||
"xxxv", "xxxvi", "xxxvii", "xxxvii",
|
||||
"xxxviii", "xxxix", "xl", "xli", "xlii",
|
||||
"xliii", "xliv", "xlv", "xlvi", "xlvii",
|
||||
"xlviii", "xlix", "l"};
|
||||
private static String[] _letter = new String[]{"a", "b", "c", "d", "e", "f", "g",
|
||||
"h", "i", "j", "k", "l", "m", "n",
|
||||
"o", "p", "q", "r", "s", "t", "u",
|
||||
"v", "x", "y", "z"};
|
||||
public NumberFormatter()
|
||||
{
|
||||
}
|
||||
public static String getNumber(int num, int style)
|
||||
{
|
||||
switch(style)
|
||||
{
|
||||
case ARABIC:
|
||||
return _arabic[num - 1];
|
||||
case UPPER_ROMAN:
|
||||
return _roman[num-1].toUpperCase(Locale.ROOT);
|
||||
case LOWER_ROMAN:
|
||||
return _roman[num-1];
|
||||
case UPPER_LETTER:
|
||||
return _letter[num-1].toUpperCase(Locale.ROOT);
|
||||
case LOWER_LETTER:
|
||||
return _letter[num-1];
|
||||
case ORDINAL:
|
||||
return _arabic[num - 1];
|
||||
default:
|
||||
return _arabic[num - 1];
|
||||
}
|
||||
}
|
||||
}
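
A few sample calls against the helper above; the second argument is one of the private style codes 0 through 5 declared at the top of the class (ARABIC=0, UPPER_ROMAN=1, LOWER_ROMAN=2, UPPER_LETTER=3, LOWER_LETTER=4, ORDINAL=5):

// getNumber is 1-based and simply indexes the arrays above.
String lowerRoman  = NumberFormatter.getNumber(4, 2);   // LOWER_ROMAN  -> "iv"
String upperRoman  = NumberFormatter.getNumber(4, 1);   // UPPER_ROMAN  -> "IV"
String upperLetter = NumberFormatter.getNumber(2, 3);   // UPPER_LETTER -> "B"
String arabic      = NumberFormatter.getNumber(7, 0);   // ARABIC       -> "7"
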
@ -1,39 +0,0 @@
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.hdf.extractor.util;
|
||||
|
||||
/**
|
||||
* Comment me
|
||||
*
|
||||
* @author Ryan Ackley
|
||||
*/
|
||||
@Deprecated
|
||||
public final class PapxNode extends PropertyNode
|
||||
{
|
||||
|
||||
|
||||
public PapxNode(int fcStart, int fcEnd, byte[] papx)
|
||||
{
|
||||
super(fcStart, fcEnd, papx);
|
||||
}
|
||||
public byte[] getPapx()
|
||||
{
|
||||
return super.getGrpprl();
|
||||
}
|
||||
|
||||
}
|
@ -1,65 +0,0 @@
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.hdf.extractor.util;
|
||||
|
||||
/**
|
||||
* Comment me
|
||||
*
|
||||
* @author Ryan Ackley
|
||||
*/
|
||||
@Deprecated
|
||||
public abstract class PropertyNode implements Comparable {
|
||||
private byte[] _grpprl;
|
||||
private int _fcStart;
|
||||
private int _fcEnd;
|
||||
|
||||
public PropertyNode(int fcStart, int fcEnd, byte[] grpprl)
|
||||
{
|
||||
_fcStart = fcStart;
|
||||
_fcEnd = fcEnd;
|
||||
_grpprl = grpprl;
|
||||
}
|
||||
public int getStart()
|
||||
{
|
||||
return _fcStart;
|
||||
}
|
||||
public int getEnd()
|
||||
{
|
||||
return _fcEnd;
|
||||
}
|
||||
protected byte[] getGrpprl()
|
||||
{
|
||||
return _grpprl;
|
||||
}
|
||||
public int compareTo(Object o)
|
||||
{
|
||||
int fcStart = ((PropertyNode)o).getStart();
|
||||
if(_fcStart == fcStart)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
else if(_fcStart < fcStart)
|
||||
{
|
||||
return -1;
|
||||
}
|
||||
else
|
||||
{
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
}
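
PropertyNode ordering by start offset is what the BTreeSets in this package rely on. A tiny sketch, using PapxNode (one of the concrete subclasses removed in this commit) and assuming the old classes are still on the classpath:

PapxNode a = new PapxNode(0, 100, new byte[0]);
PapxNode b = new PapxNode(100, 250, new byte[0]);
System.out.println(a.compareTo(b));   // -1: a starts before b
System.out.println(b.compareTo(a));   //  1
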
@ -1,44 +0,0 @@
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.hdf.extractor.util;
|
||||
|
||||
/**
|
||||
* Comment me
|
||||
*
|
||||
* @author Ryan Ackley
|
||||
*/
|
||||
@Deprecated
|
||||
public final class SepxNode extends PropertyNode
|
||||
{
|
||||
|
||||
int _index;
|
||||
|
||||
public SepxNode(int index, int start, int end, byte[] sepx)
|
||||
{
|
||||
super(start, end, sepx);
|
||||
}
|
||||
public byte[] getSepx()
|
||||
{
|
||||
return getGrpprl();
|
||||
}
|
||||
|
||||
public int compareTo(Object obj) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
}
|
@ -1,43 +0,0 @@
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.hdf.model;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.poi.hdf.event.HDFParsingListener;
|
||||
import org.apache.poi.hdf.event.EventBridge;
|
||||
|
||||
@Deprecated
|
||||
public final class HDFDocument
|
||||
{
|
||||
|
||||
HDFObjectModel _model;
|
||||
|
||||
|
||||
public HDFDocument(InputStream in, HDFParsingListener listener) throws IOException
|
||||
{
|
||||
EventBridge eb = new EventBridge(listener);
|
||||
/* HDFObjectFactory factory = */ new HDFObjectFactory(in, eb);
|
||||
}
|
||||
public HDFDocument(InputStream in) throws IOException
|
||||
{
|
||||
_model = new HDFObjectModel();
|
||||
/* HDFObjectFactory factory = */ new HDFObjectFactory(in, _model);
|
||||
}
|
||||
}
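
A minimal sketch of how this removed entry point was driven, assuming a POI build that still ships it; the file name and class name are placeholders:

import java.io.FileInputStream;
import org.apache.poi.hdf.model.HDFDocument;

public class HdfDocumentSketch {
    public static void main(String[] args) throws Exception {
        try (FileInputStream in = new FileInputStream("sample.doc")) {   // placeholder path
            HDFDocument doc = new HDFDocument(in);   // builds an HDFObjectModel via HDFObjectFactory
        }
    }
}
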
@ -1,604 +0,0 @@
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.hdf.model;
|
||||
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.poi.hdf.event.HDFLowLevelParsingListener;
|
||||
import org.apache.poi.hdf.model.hdftypes.CHPFormattedDiskPage;
|
||||
import org.apache.poi.hdf.model.hdftypes.ChpxNode;
|
||||
import org.apache.poi.hdf.model.hdftypes.DocumentProperties;
|
||||
import org.apache.poi.hdf.model.hdftypes.FileInformationBlock;
|
||||
import org.apache.poi.hdf.model.hdftypes.FontTable;
|
||||
import org.apache.poi.hdf.model.hdftypes.FormattedDiskPage;
|
||||
import org.apache.poi.hdf.model.hdftypes.ListTables;
|
||||
import org.apache.poi.hdf.model.hdftypes.PAPFormattedDiskPage;
|
||||
import org.apache.poi.hdf.model.hdftypes.PapxNode;
|
||||
import org.apache.poi.hdf.model.hdftypes.PlexOfCps;
|
||||
import org.apache.poi.hdf.model.hdftypes.SepxNode;
|
||||
import org.apache.poi.hdf.model.hdftypes.StyleSheet;
|
||||
import org.apache.poi.hdf.model.hdftypes.TextPiece;
|
||||
import org.apache.poi.hdf.model.util.ParsingState;
|
||||
import org.apache.poi.poifs.filesystem.DocumentEntry;
|
||||
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
|
||||
import org.apache.poi.util.LittleEndian;
|
||||
|
||||
/**
|
||||
* The Object Factory takes in a stream and creates the low level objects
|
||||
* that represent the data.
|
||||
* @author andy
|
||||
*/
|
||||
@Deprecated
|
||||
public final class HDFObjectFactory {
|
||||
|
||||
/** OLE stuff*/
|
||||
private POIFSFileSystem _filesystem;
|
||||
/** The FIB*/
|
||||
private FileInformationBlock _fib;
|
||||
|
||||
/** Used to set up the object model*/
|
||||
private HDFLowLevelParsingListener _listener;
|
||||
/** parsing state for characters */
|
||||
private ParsingState _charParsingState;
|
||||
/** parsing state for paragraphs */
|
||||
private ParsingState _parParsingState;
|
||||
|
||||
/** main document stream buffer*/
|
||||
byte[] _mainDocument;
|
||||
/** table stream buffer*/
|
||||
byte[] _tableBuffer;
|
||||
|
||||
|
||||
public static void main(String args[]) throws Exception {
|
||||
new HDFObjectFactory(new FileInputStream("c:\\test.doc"));
|
||||
}
|
||||
|
||||
/** Creates a new instance of HDFObjectFactory
|
||||
*
|
||||
* @param istream The InputStream that is the Word document
|
||||
*
|
||||
*/
|
||||
protected HDFObjectFactory(InputStream istream, HDFLowLevelParsingListener l) throws IOException
|
||||
{
|
||||
if (l == null)
|
||||
{
|
||||
_listener = new HDFObjectModel();
|
||||
}
|
||||
else
|
||||
{
|
||||
_listener = l;
|
||||
}
|
||||
|
||||
//do Ole stuff
|
||||
_filesystem = new POIFSFileSystem(istream);
|
||||
|
||||
DocumentEntry headerProps =
|
||||
(DocumentEntry)_filesystem.getRoot().getEntry("WordDocument");
|
||||
|
||||
_mainDocument = new byte[headerProps.getSize()];
|
||||
_filesystem.createDocumentInputStream("WordDocument").read(_mainDocument);
|
||||
|
||||
_fib = new FileInformationBlock(_mainDocument);
|
||||
|
||||
initTableStream();
|
||||
initTextPieces();
|
||||
initFormattingProperties();
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
/** Creates a new instance of HDFObjectFactory
|
||||
*
|
||||
* @param istream The InputStream that is the Word document
|
||||
*
|
||||
*/
|
||||
public HDFObjectFactory(InputStream istream) throws IOException
|
||||
{
|
||||
this(istream, null);
|
||||
}
|
||||
|
||||
public static List<FileInformationBlock> getTypes(InputStream istream) throws IOException
|
||||
{
|
||||
List<FileInformationBlock> results = new ArrayList<FileInformationBlock>(1);
|
||||
|
||||
//do Ole stuff
|
||||
POIFSFileSystem filesystem = null;
|
||||
try {
|
||||
filesystem = new POIFSFileSystem(istream);
|
||||
DocumentEntry headerProps =
|
||||
(DocumentEntry)filesystem.getRoot().getEntry("WordDocument");
|
||||
|
||||
byte[] mainDocument = new byte[headerProps.getSize()];
|
||||
filesystem.createDocumentInputStream("WordDocument").read(mainDocument);
|
||||
|
||||
FileInformationBlock fib = new FileInformationBlock(mainDocument);
|
||||
|
||||
|
||||
results.add(fib);
|
||||
return results;
|
||||
} finally {
|
||||
if (filesystem != null) filesystem.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Initializes the table stream
|
||||
*
|
||||
* @throws IOException
|
||||
*/
|
||||
private void initTableStream() throws IOException
|
||||
{
|
||||
String tablename = null;
|
||||
if(_fib.isFWhichTblStm())
|
||||
{
|
||||
tablename="1Table";
|
||||
}
|
||||
else
|
||||
{
|
||||
tablename="0Table";
|
||||
}
|
||||
|
||||
DocumentEntry tableEntry = (DocumentEntry)_filesystem.getRoot().getEntry(tablename);
|
||||
|
||||
//load the table stream into a buffer
|
||||
int size = tableEntry.getSize();
|
||||
_tableBuffer = new byte[size];
|
||||
_filesystem.createDocumentInputStream(tablename).read(_tableBuffer);
|
||||
}
|
||||
/**
|
||||
* Initializes the text pieces. Text is divided into pieces because some
|
||||
* "pieces" may only contain unicode characters.
|
||||
*
|
||||
* @throws IOException
|
||||
*/
|
||||
private void initTextPieces() throws IOException
|
||||
{
|
||||
int pos = _fib.getFcClx();
|
||||
|
||||
//skips through the prms before we reach the piece table. These contain data
|
||||
//for actual fast saved files
|
||||
while (_tableBuffer[pos] == 1)
|
||||
{
|
||||
pos++;
|
||||
int skip = LittleEndian.getShort(_tableBuffer, pos);
|
||||
pos += 2 + skip;
|
||||
}
|
||||
if(_tableBuffer[pos] != 2)
|
||||
{
|
||||
throw new IOException("The text piece table is corrupted");
|
||||
}
|
||||
//parse out the text pieces
|
||||
int pieceTableSize = LittleEndian.getInt(_tableBuffer, ++pos);
|
||||
pos += 4;
|
||||
int pieces = (pieceTableSize - 4) / 12;
|
||||
for (int x = 0; x < pieces; x++) {
|
||||
int filePos = LittleEndian.getInt(_tableBuffer, pos + ((pieces + 1) * 4) + (x * 8) + 2);
|
||||
boolean unicode = false;
|
||||
if ((filePos & 0x40000000) == 0) {
|
||||
unicode = true;
|
||||
} else {
|
||||
unicode = false;
|
||||
filePos &= ~(0x40000000);//gives me FC in doc stream
|
||||
filePos /= 2;
|
||||
}
|
||||
int totLength = LittleEndian.getInt(_tableBuffer, pos + (x + 1) * 4) -
|
||||
LittleEndian.getInt(_tableBuffer, pos + (x * 4));
|
||||
|
||||
TextPiece piece = new TextPiece(filePos, totLength, unicode);
|
||||
_listener.text(piece);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* initializes all of the formatting properties for a Word Document
|
||||
*/
|
||||
private void initFormattingProperties()
|
||||
{
|
||||
createStyleSheet();
|
||||
createListTables();
|
||||
createFontTable();
|
||||
|
||||
initDocumentProperties();
|
||||
initSectionProperties();
|
||||
//initCharacterProperties();
|
||||
//initParagraphProperties();
|
||||
}
|
||||
private void initCharacterProperties(int charOffset, PlexOfCps charPlcf, int start, int end)
|
||||
{
|
||||
//Initialize paragraph property stuff
|
||||
//int currentCharPage = _charParsingState.getCurrentPage();
|
||||
int charPlcfLen = charPlcf.length();
|
||||
int currentPageIndex = _charParsingState.getCurrentPageIndex();
|
||||
FormattedDiskPage fkp = _charParsingState.getFkp();
|
||||
int currentChpxIndex = _charParsingState.getCurrentPropIndex();
|
||||
int currentArraySize = fkp.size();
|
||||
|
||||
//get the character runs for this paragraph
|
||||
int charStart = 0;
|
||||
int charEnd = 0;
|
||||
//add the character runs
|
||||
do
|
||||
{
|
||||
if (currentChpxIndex < currentArraySize)
|
||||
{
|
||||
charStart = fkp.getStart(currentChpxIndex);
|
||||
charEnd = fkp.getEnd(currentChpxIndex);
|
||||
byte[] chpx = fkp.getGrpprl(currentChpxIndex);
|
||||
_listener.characterRun(new ChpxNode(Math.max(charStart, start), Math.min(charEnd, end), chpx));
|
||||
|
||||
if (charEnd < end)
|
||||
{
|
||||
currentChpxIndex++;
|
||||
}
|
||||
else
|
||||
{
|
||||
_charParsingState.setState(currentPageIndex, fkp, currentChpxIndex);
|
||||
break;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
int currentCharPage = LittleEndian.getInt(_tableBuffer, charOffset + charPlcf.getStructOffset(++currentPageIndex));
|
||||
byte[] byteFkp = new byte[512];
|
||||
System.arraycopy(_mainDocument, (currentCharPage * 512), byteFkp, 0, 512);
|
||||
fkp = new CHPFormattedDiskPage(byteFkp);
|
||||
currentChpxIndex = 0;
|
||||
currentArraySize = fkp.size();
|
||||
}
|
||||
}
|
||||
while(currentPageIndex < charPlcfLen);
|
||||
}
|
||||
private void initParagraphProperties(int parOffset, PlexOfCps parPlcf, int charOffset, PlexOfCps charPlcf, int start, int end)
|
||||
{
|
||||
//Initialize paragraph property stuff
|
||||
//int currentParPage = _parParsingState.getCurrentPage();
|
||||
int parPlcfLen = parPlcf.length();
|
||||
int currentPageIndex = _parParsingState.getCurrentPageIndex();
|
||||
FormattedDiskPage fkp = _parParsingState.getFkp();
|
||||
int currentPapxIndex = _parParsingState.getCurrentPropIndex();
|
||||
int currentArraySize = fkp.size();
|
||||
|
||||
do
|
||||
{
|
||||
if (currentPapxIndex < currentArraySize)
|
||||
{
|
||||
int parStart = fkp.getStart(currentPapxIndex);
|
||||
int parEnd = fkp.getEnd(currentPapxIndex);
|
||||
byte[] papx = fkp.getGrpprl(currentPapxIndex);
|
||||
_listener.paragraph(new PapxNode(Math.max(parStart, start), Math.min(parEnd, end), papx));
|
||||
initCharacterProperties(charOffset, charPlcf, Math.max(start, parStart), Math.min(parEnd, end));
|
||||
if (parEnd < end)
|
||||
{
|
||||
currentPapxIndex++;
|
||||
}
|
||||
else
|
||||
{
|
||||
//save the state
|
||||
_parParsingState.setState(currentPageIndex, fkp, currentPapxIndex);
|
||||
break;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
int currentParPage = LittleEndian.getInt(_tableBuffer, parOffset + parPlcf.getStructOffset(++currentPageIndex));
|
||||
byte byteFkp[] = new byte[512];
|
||||
System.arraycopy(_mainDocument, (currentParPage * 512), byteFkp, 0, 512);
|
||||
fkp = new PAPFormattedDiskPage(byteFkp);
|
||||
currentPapxIndex = 0;
|
||||
currentArraySize = fkp.size();
|
||||
}
|
||||
}
|
||||
while(currentPageIndex < parPlcfLen);
|
||||
}
|
||||
/**
|
||||
* initializes the CharacterProperties BTree
|
||||
*/
|
||||
/*private void initCharacterProperties()
|
||||
{
|
||||
int charOffset = _fib.getFcPlcfbteChpx();
|
||||
int charPlcSize = _fib.getLcbPlcfbteChpx();
|
||||
|
||||
//int arraySize = (charPlcSize - 4)/8;
|
||||
|
||||
//first we must go through the bin table and find the fkps
|
||||
for(int x = 0; x < arraySize; x++)
|
||||
{
|
||||
|
||||
//get page number(has nothing to do with document page)
|
||||
//containing the chpx for the paragraph
|
||||
int PN = LittleEndian.getInt(_tableBuffer, charOffset + (4 * (arraySize + 1) + (4 * x)));
|
||||
|
||||
byte[] fkp = new byte[512];
|
||||
System.arraycopy(_mainDocument, (PN * 512), fkp, 0, 512);
|
||||
//take each fkp and get the chpxs
|
||||
int crun = LittleEndian.getUnsignedByte(fkp, 511);
|
||||
for(int y = 0; y < crun; y++)
|
||||
{
|
||||
//get the beginning fc of each paragraph text run
|
||||
int fcStart = LittleEndian.getInt(fkp, y * 4);
|
||||
int fcEnd = LittleEndian.getInt(fkp, (y+1) * 4);
|
||||
//get the offset in fkp of the papx for this paragraph
|
||||
int chpxOffset = 2 * LittleEndian.getUnsignedByte(fkp, ((crun + 1) * 4) + y);
|
||||
|
||||
//optimization if offset == 0 use "Normal" style
|
||||
if(chpxOffset == 0)
|
||||
|
||||
{
|
||||
_characterRuns.add(new ChpxNode(fcStart, fcEnd, new byte[0]));
|
||||
continue;
|
||||
}
|
||||
|
||||
int size = LittleEndian.getUnsignedByte(fkp, chpxOffset);
|
||||
|
||||
byte[] chpx = new byte[size];
|
||||
System.arraycopy(fkp, ++chpxOffset, chpx, 0, size);
|
||||
//_papTable.put(Integer.valueOf(fcStart), papx);
|
||||
_characterRuns.add(new ChpxNode(fcStart, fcEnd, chpx));
|
||||
}
|
||||
|
||||
}
|
||||
}*/
|
||||
/**
|
||||
* Initializes the Paragraph Properties BTree
|
||||
*/
|
||||
@SuppressWarnings("unused")
|
||||
private void initParagraphProperties()
|
||||
{
|
||||
//paragraphs
|
||||
int parOffset = _fib.getFcPlcfbtePapx();
|
||||
int parPlcSize = _fib.getLcbPlcfbtePapx();
|
||||
|
||||
//characters
|
||||
int charOffset = _fib.getFcPlcfbteChpx();
|
||||
int charPlcSize = _fib.getLcbPlcfbteChpx();
|
||||
|
||||
PlexOfCps charPlcf = new PlexOfCps(charPlcSize, 4);
|
||||
PlexOfCps parPlcf = new PlexOfCps(parPlcSize, 4);
|
||||
|
||||
//Initialize character property stuff
|
||||
int currentCharPage = LittleEndian.getInt(_tableBuffer, charOffset + charPlcf.getStructOffset(0));
|
||||
int charPlcfLen = charPlcf.length();
|
||||
int currentPageIndex = 0;
|
||||
byte[] fkp = new byte[512];
|
||||
System.arraycopy(_mainDocument, (currentCharPage * 512), fkp, 0, 512);
|
||||
CHPFormattedDiskPage cfkp = new CHPFormattedDiskPage(fkp);
|
||||
int currentChpxIndex = 0;
|
||||
int currentArraySize = cfkp.size();
|
||||
|
||||
|
||||
int arraySize = parPlcf.length();
|
||||
|
||||
//first we must go through the bin table and find the fkps
|
||||
for(int x = 0; x < arraySize; x++)
|
||||
{
|
||||
int PN = LittleEndian.getInt(_tableBuffer, parOffset + parPlcf.getStructOffset(x));
|
||||
|
||||
fkp = new byte[512];
|
||||
System.arraycopy(_mainDocument, (PN * 512), fkp, 0, 512);
|
||||
|
||||
PAPFormattedDiskPage pfkp = new PAPFormattedDiskPage(fkp);
|
||||
//take each fkp and get the paps
|
||||
int crun = pfkp.size();
|
||||
for(int y = 0; y < crun; y++)
|
||||
{
|
||||
//get the beginning fc of each paragraph text run
|
||||
int fcStart = pfkp.getStart(y);
|
||||
int fcEnd = pfkp.getEnd(y);
|
||||
|
||||
//get the papx for this paragraph
|
||||
byte[] papx = pfkp.getGrpprl(y);
|
||||
|
||||
_listener.paragraph(new PapxNode(fcStart, fcEnd, papx));
|
||||
|
||||
//get the character runs for this paragraph
|
||||
int charStart = 0;
|
||||
int charEnd = 0;
|
||||
//add the character runs
|
||||
do
|
||||
{
|
||||
if (currentChpxIndex < currentArraySize)
|
||||
{
|
||||
charStart = cfkp.getStart(currentChpxIndex);
|
||||
charEnd = cfkp.getEnd(currentChpxIndex);
|
||||
byte[] chpx = cfkp.getGrpprl(currentChpxIndex);
|
||||
_listener.characterRun(new ChpxNode(charStart, charEnd, chpx));
|
||||
if (charEnd < fcEnd)
|
||||
{
|
||||
currentChpxIndex++;
|
||||
}
|
||||
else
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
currentCharPage = LittleEndian.getInt(_tableBuffer, charOffset + charPlcf.getStructOffset(++currentPageIndex));
|
||||
fkp = new byte[512];
|
||||
System.arraycopy(_mainDocument, (currentCharPage * 512), fkp, 0, 512);
|
||||
cfkp = new CHPFormattedDiskPage(fkp);
|
||||
currentChpxIndex = 0;
|
||||
currentArraySize = cfkp.size();
|
||||
}
|
||||
}
|
||||
while(currentCharPage <= charPlcfLen + 1);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
private void initParsingStates(int parOffset, PlexOfCps parPlcf, int charOffset, PlexOfCps charPlcf)
|
||||
{
|
||||
int currentCharPage = LittleEndian.getInt(_tableBuffer, charOffset + charPlcf.getStructOffset(0));
|
||||
byte[] fkp = new byte[512];
|
||||
System.arraycopy(_mainDocument, (currentCharPage * 512), fkp, 0, 512);
|
||||
CHPFormattedDiskPage cfkp = new CHPFormattedDiskPage(fkp);
|
||||
_charParsingState = new ParsingState(currentCharPage, cfkp);
|
||||
|
||||
int currentParPage = LittleEndian.getInt(_tableBuffer, parOffset + parPlcf.getStructOffset(0));
|
||||
fkp = new byte[512];
|
||||
System.arraycopy(_mainDocument, (currentParPage * 512), fkp, 0, 512);
|
||||
PAPFormattedDiskPage pfkp = new PAPFormattedDiskPage(fkp);
|
||||
_parParsingState = new ParsingState(currentParPage, pfkp);
|
||||
}
|
||||
/**
|
||||
* initializes the SectionProperties BTree
|
||||
*/
|
||||
@SuppressWarnings("unused")
|
||||
private void initSectionProperties()
|
||||
{
|
||||
|
||||
int ccpText = _fib.getCcpText();
|
||||
// int ccpFtn = _fib.getCcpFtn();
|
||||
|
||||
//sections
|
||||
int fcMin = _fib.getFcMin();
|
||||
int plcfsedFC = _fib.getFcPlcfsed();
|
||||
int plcfsedSize = _fib.getLcbPlcfsed();
|
||||
|
||||
//paragraphs
|
||||
int parOffset = _fib.getFcPlcfbtePapx();
|
||||
int parPlcSize = _fib.getLcbPlcfbtePapx();
|
||||
|
||||
//characters
|
||||
int charOffset = _fib.getFcPlcfbteChpx();
|
||||
int charPlcSize = _fib.getLcbPlcfbteChpx();
|
||||
|
||||
PlexOfCps charPlcf = new PlexOfCps(charPlcSize, 4);
|
||||
PlexOfCps parPlcf = new PlexOfCps(parPlcSize, 4);
|
||||
|
||||
initParsingStates(parOffset, parPlcf, charOffset, charPlcf);
|
||||
|
||||
//byte[] plcfsed = new byte[plcfsedSize];
|
||||
//System.arraycopy(_tableBuffer, plcfsedFC, plcfsed, 0, plcfsedSize);
|
||||
|
||||
PlexOfCps plcfsed = new PlexOfCps(plcfsedSize, 12);
|
||||
int arraySize = plcfsed.length();
|
||||
|
||||
int start = fcMin;
|
||||
int end = fcMin + ccpText;
|
||||
int x = 0;
|
||||
int sectionEnd = 0;
|
||||
|
||||
//do the main body sections
|
||||
while (x < arraySize)
|
||||
{
|
||||
int sectionStart = LittleEndian.getInt(_tableBuffer, plcfsedFC + plcfsed.getIntOffset(x)) + fcMin;
|
||||
sectionEnd = LittleEndian.getInt(_tableBuffer, plcfsedFC + plcfsed.getIntOffset(x + 1)) + fcMin;
|
||||
int sepxStart = LittleEndian.getInt(_tableBuffer, plcfsedFC + plcfsed.getStructOffset(x) + 2);
|
||||
int sepxSize = LittleEndian.getShort(_mainDocument, sepxStart);
|
||||
|
||||
byte[] sepx = new byte[sepxSize];
|
||||
System.arraycopy(_mainDocument, sepxStart + 2, sepx, 0, sepxSize);
|
||||
SepxNode node = new SepxNode(x + 1, sectionStart, sectionEnd, sepx);
|
||||
_listener.bodySection(node);
|
||||
initParagraphProperties(parOffset, parPlcf, charOffset, charPlcf, sectionStart, Math.min(end, sectionEnd));
|
||||
|
||||
if (sectionEnd > end)
|
||||
{
|
||||
break;
|
||||
}
|
||||
x++;
|
||||
}
|
||||
//do the header sections
|
||||
for (; x < arraySize; x++)// && sectionEnd <= end; x++)
|
||||
{
|
||||
int sectionStart = LittleEndian.getInt(_tableBuffer, plcfsedFC + plcfsed.getIntOffset(x)) + fcMin;
|
||||
sectionEnd = LittleEndian.getInt(_tableBuffer, plcfsedFC + plcfsed.getIntOffset(x + 1)) + fcMin;
|
||||
int sepxStart = LittleEndian.getInt(_tableBuffer, plcfsedFC + plcfsed.getStructOffset(x) + 2);
|
||||
int sepxSize = LittleEndian.getShort(_mainDocument, sepxStart);
|
||||
|
||||
byte[] sepx = new byte[sepxSize];
|
||||
System.arraycopy(_mainDocument, sepxStart + 2, sepx, 0, sepxSize);
|
||||
SepxNode node = new SepxNode(x + 1, sectionStart, sectionEnd, sepx);
|
||||
_listener.hdrSection(node);
|
||||
initParagraphProperties(parOffset, parPlcf, charOffset, charPlcf, Math.max(sectionStart, end), sectionEnd);
|
||||
|
||||
}
|
||||
_listener.endSections();
|
||||
}
|
||||
/**
|
||||
* Initializes the DocumentProperties object unique to this document.
|
||||
*/
|
||||
private void initDocumentProperties()
|
||||
{
|
||||
int pos = _fib.getFcDop();
|
||||
int size = _fib.getLcbDop();
|
||||
byte[] dopArray = new byte[size];
|
||||
|
||||
System.arraycopy(_tableBuffer, pos, dopArray, 0, size);
|
||||
_listener.document(new DocumentProperties(dopArray));
|
||||
}
|
||||
/**
|
||||
* Uncompresses the StyleSheet from file into memory.
|
||||
*/
|
||||
private void createStyleSheet()
|
||||
{
|
||||
int stshIndex = _fib.getFcStshf();
|
||||
int stshSize = _fib.getLcbStshf();
|
||||
byte[] stsh = new byte[stshSize];
|
||||
System.arraycopy(_tableBuffer, stshIndex, stsh, 0, stshSize);
|
||||
|
||||
_listener.styleSheet(new StyleSheet(stsh));
|
||||
}
|
||||
/**
|
||||
* Initializes the list tables for this document
|
||||
*/
|
||||
private void createListTables()
|
||||
{
|
||||
int lfoOffset = _fib.getFcPlfLfo();
|
||||
int lfoSize = _fib.getLcbPlfLfo();
|
||||
byte[] plflfo = new byte[lfoSize];
|
||||
|
||||
System.arraycopy(_tableBuffer, lfoOffset, plflfo, 0, lfoSize);
|
||||
|
||||
int lstOffset = _fib.getFcPlcfLst();
|
||||
int lstSize = _fib.getLcbPlcfLst();
|
||||
if (lstOffset > 0 && lstSize > 0)
|
||||
{
|
||||
// The lstSize returned by _fib.getLcbPlcfLst() doesn't appear
|
||||
// to take into account any LVLs. Therefore, we recalculate
|
||||
// lstSize based on where the LFO section begins (because the
|
||||
// LFO section immediately follows the LST section).
|
||||
lstSize = lfoOffset - lstOffset;
|
||||
byte[] plcflst = new byte[lstSize];
|
||||
System.arraycopy(_tableBuffer, lstOffset, plcflst, 0, lstSize);
|
||||
_listener.lists(new ListTables(plcflst, plflfo));
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Initializes this document's FontTable.
|
||||
*/
|
||||
private void createFontTable()
|
||||
{
|
||||
int fontTableIndex = _fib.getFcSttbfffn();
|
||||
int fontTableSize = _fib.getLcbSttbfffn();
|
||||
byte[] fontTable = new byte[fontTableSize];
|
||||
System.arraycopy(_tableBuffer, fontTableIndex, fontTable, 0, fontTableSize);
|
||||
_listener.fonts(new FontTable(fontTable));
|
||||
}
|
||||
|
||||
}
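
A short sketch of the one public static helper above, assuming a POI build that still ships the class; the path is a placeholder and the snippet belongs inside a method declared to throw IOException:

// Needs java.io.FileInputStream, java.util.List and
// org.apache.poi.hdf.model.hdftypes.FileInformationBlock on the import list.
try (FileInputStream in = new FileInputStream("sample.doc")) {            // placeholder path
    List<FileInformationBlock> fibs = HDFObjectFactory.getTypes(in);      // reads only the FIB
    System.out.println("uses 1Table stream: " + fibs.get(0).isFWhichTblStm());
}
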
@ -1,114 +0,0 @@
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.hdf.model;
|
||||
|
||||
import org.apache.poi.hdf.event.HDFLowLevelParsingListener;
|
||||
import org.apache.poi.hdf.model.util.BTreeSet;
|
||||
|
||||
import org.apache.poi.hdf.model.hdftypes.ChpxNode;
|
||||
import org.apache.poi.hdf.model.hdftypes.PapxNode;
|
||||
import org.apache.poi.hdf.model.hdftypes.SepxNode;
|
||||
import org.apache.poi.hdf.model.hdftypes.TextPiece;
|
||||
import org.apache.poi.hdf.model.hdftypes.DocumentProperties;
|
||||
import org.apache.poi.hdf.model.hdftypes.FontTable;
|
||||
import org.apache.poi.hdf.model.hdftypes.ListTables;
|
||||
import org.apache.poi.hdf.model.hdftypes.StyleSheet;
|
||||
|
||||
|
||||
@Deprecated
|
||||
public final class HDFObjectModel implements HDFLowLevelParsingListener
|
||||
{
|
||||
|
||||
/** "WordDocument" from the POIFS */
|
||||
private byte[] _mainDocument;
|
||||
|
||||
/** The DOP*/
|
||||
private DocumentProperties _dop;
|
||||
/**the StyleSheet*/
|
||||
private StyleSheet _styleSheet;
|
||||
/**list info */
|
||||
private ListTables _listTables;
|
||||
/** Font info */
|
||||
private FontTable _fonts;
|
||||
|
||||
/** text offset in main stream */
|
||||
int _fcMin;
|
||||
|
||||
/** text pieces */
|
||||
BTreeSet _text = new BTreeSet();
|
||||
/** document sections */
|
||||
BTreeSet _sections = new BTreeSet();
|
||||
/** document paragraphs */
|
||||
BTreeSet _paragraphs = new BTreeSet();
|
||||
/** document character runs */
|
||||
BTreeSet _characterRuns = new BTreeSet();
|
||||
|
||||
public HDFObjectModel()
|
||||
{
|
||||
}
|
||||
public void mainDocument(byte[] mainDocument)
|
||||
{
|
||||
_mainDocument = mainDocument;
|
||||
}
|
||||
public void tableStream(byte[] tableStream)
|
||||
{
|
||||
}
|
||||
public void miscellaneous(int fcMin, int ccpText, int ccpFtn, int fcPlcfhdd, int lcbPlcfhdd)
|
||||
{
|
||||
_fcMin = fcMin;
|
||||
}
|
||||
public void document(DocumentProperties dop)
|
||||
{
|
||||
_dop = dop;
|
||||
}
|
||||
public void bodySection(SepxNode sepx)
|
||||
{
|
||||
_sections.add(sepx);
|
||||
}
|
||||
public void hdrSection(SepxNode sepx)
|
||||
{
|
||||
_sections.add(sepx);
|
||||
}
|
||||
public void endSections()
|
||||
{
|
||||
}
|
||||
public void paragraph(PapxNode papx)
|
||||
{
|
||||
_paragraphs.add(papx);
|
||||
}
|
||||
public void characterRun(ChpxNode chpx)
|
||||
{
|
||||
_characterRuns.add(chpx);
|
||||
}
|
||||
public void text(TextPiece t)
|
||||
{
|
||||
_text.add(t);
|
||||
}
|
||||
public void fonts(FontTable fontTbl)
|
||||
{
|
||||
_fonts = fontTbl;
|
||||
}
|
||||
public void lists(ListTables listTbl)
|
||||
{
|
||||
_listTables = listTbl;
|
||||
}
|
||||
public void styleSheet(StyleSheet stsh)
|
||||
{
|
||||
_styleSheet = stsh;
|
||||
}
|
||||
}
|
@ -1,78 +0,0 @@
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.hdf.model.hdftypes;
|
||||
|
||||
import org.apache.poi.util.LittleEndian;
|
||||
|
||||
/**
|
||||
* Represents a CHP fkp. The style properties for paragraph and character runs
|
||||
* are stored in fkps. There are PAP fkps for paragraph properties and CHP fkps
|
||||
* for character run properties. The first part of the fkp for both CHP and PAP
|
||||
* fkps consists of an array of 4 byte int offsets that represent a
|
||||
* Paragraph's or Character run's text offset in the main stream. The ending
|
||||
* offset is the next value in the array. For example, if an fkp has X number of
|
||||
* Paragraph's stored in it then there are (x + 1) 4 byte ints in the beginning
|
||||
* array. The number X is determined by the last byte in a 512 byte fkp.
|
||||
*
|
||||
* CHP and PAP fkps also store the compressed styles(grpprl) that correspond to
|
||||
* the offsets on the front of the fkp. The offset of the grpprls is determined
|
||||
* differently for CHP fkps and PAP fkps.
|
||||
*
|
||||
* @author Ryan Ackley
|
||||
*/
|
||||
@Deprecated
|
||||
public final class CHPFormattedDiskPage extends FormattedDiskPage
|
||||
{
|
||||
|
||||
|
||||
/**
|
||||
* This constructs a CHPFormattedDiskPage from a raw fkp (512 byte array
|
||||
* read from a Word file).
|
||||
*
|
||||
* @param fkp The 512 byte array to read data from
|
||||
*/
|
||||
public CHPFormattedDiskPage(byte[] fkp)
|
||||
{
|
||||
super(fkp);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the chpx for the character run at index in this fkp.
|
||||
*
|
||||
* @param index The index of the chpx to get.
|
||||
* @return a chpx grpprl.
|
||||
*/
|
||||
public byte[] getGrpprl(int index)
|
||||
{
|
||||
int chpxOffset = 2 * LittleEndian.getUnsignedByte(_fkp, ((_crun + 1) * 4) + index);
|
||||
|
||||
//optimization if offset == 0 use "Normal" style
|
||||
if(chpxOffset == 0)
|
||||
{
|
||||
return new byte[0];
|
||||
|
||||
}
|
||||
|
||||
int size = LittleEndian.getUnsignedByte(_fkp, chpxOffset);
|
||||
|
||||
byte[] chpx = new byte[size];
|
||||
|
||||
System.arraycopy(_fkp, ++chpxOffset, chpx, 0, size);
|
||||
return chpx;
|
||||
}
|
||||
}
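
The javadoc above spells out the raw 512-byte fkp layout. A hedged sketch that reads the run count and the (crun + 1) text offsets directly, duplicating what FormattedDiskPage and its getStart/getEnd accessors already do internally; dumpFkp is an illustrative name, not part of the API:

// Requires org.apache.poi.util.LittleEndian; fkp is the raw 512-byte page
// copied out of the "WordDocument" stream.
static void dumpFkp(byte[] fkp) {
    int crun = LittleEndian.getUnsignedByte(fkp, 511);     // run count lives in the last byte
    for (int i = 0; i < crun; i++) {
        int start = LittleEndian.getInt(fkp, i * 4);        // FC where run i begins
        int end = LittleEndian.getInt(fkp, (i + 1) * 4);    // (crun + 1) offsets in total
        System.out.println("run " + i + ": " + start + ".." + end);
    }
}
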
@ -1,57 +0,0 @@
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.hdf.model.hdftypes;
|
||||
|
||||
import org.apache.poi.hdf.model.hdftypes.definitions.CHPAbstractType;
|
||||
/**
|
||||
* Properties for character runs.
|
||||
*
|
||||
* @author Ryan Ackley
|
||||
*/
|
||||
@Deprecated
|
||||
public final class CharacterProperties extends CHPAbstractType implements Cloneable
|
||||
{
|
||||
|
||||
public CharacterProperties()
|
||||
{
|
||||
setDttmRMark(new short[2]);
|
||||
setDttmRMarkDel(new short[2]);
|
||||
setXstDispFldRMark(new byte[32]);
|
||||
setBrc(new short[2]);
|
||||
setHps(20);
|
||||
setFcPic(-1);
|
||||
setIstd(10);
|
||||
setLidFE(0x0400);
|
||||
setLidDefault(0x0400);
|
||||
setWCharScale(100);
|
||||
//setFUsePgsuSettings(-1);
|
||||
}
|
||||
/**
|
||||
* Used to make a deep copy of this object.
|
||||
*/
|
||||
public Object clone() throws CloneNotSupportedException
|
||||
{
|
||||
CharacterProperties clone = (CharacterProperties)super.clone();
|
||||
clone.setBrc(new short[2]);
|
||||
System.arraycopy(getBrc(), 0, clone.getBrc(), 0, 2);
|
||||
System.arraycopy(getDttmRMark(), 0, clone.getDttmRMark(), 0, 2);
|
||||
System.arraycopy(getDttmRMarkDel(), 0, clone.getDttmRMarkDel(), 0, 2);
|
||||
System.arraycopy(getXstDispFldRMark(), 0, clone.getXstDispFldRMark(), 0, 32);
|
||||
return clone;
|
||||
}
|
||||
}
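
A brief sketch of the deep-copy contract implemented above, assuming the removed class is available; the calling code must handle or declare CloneNotSupportedException, and the getBrc accessor is inherited from CHPAbstractType:

CharacterProperties base = new CharacterProperties();
CharacterProperties copy = (CharacterProperties) base.clone();
copy.getBrc()[0] = 1;                        // mutate the copy's brc array...
assert base.getBrc()[0] == 0;                // ...the original is untouched, clone() re-allocated it
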
@ -1,40 +0,0 @@
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.hdf.model.hdftypes;

/**
 * Comment me
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class ChpxNode extends PropertyNode
{

  public ChpxNode(int fcStart, int fcEnd, byte[] chpx)
  {
    super(fcStart, fcEnd, chpx);
  }

  public byte[] getChpx()
  {
    return super.getGrpprl();
  }
}
@ -1,52 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

import org.apache.poi.util.LittleEndian;

/**
 * Comment me
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class DocumentProperties implements HDFType
{

  public boolean _fFacingPages;
  public int _fpc;
  public int _epc;
  public int _rncFtn;
  public int _nFtn;
  public int _rncEdn;
  public int _nEdn;

  public DocumentProperties(byte[] dopArray)
  {
    _fFacingPages = (dopArray[0] & 0x1) > 0;
    _fpc = (dopArray[0] & 0x60) >> 5;

    short num = LittleEndian.getShort(dopArray, 2);
    _rncFtn = (num & 0x3);
    _nFtn = (short)(num & 0xfffc) >> 2;
    num = LittleEndian.getShort(dopArray, 52);
    _rncEdn = num & 0x3;
    _nEdn = (short)(num & 0xfffc) >> 2;
    num = LittleEndian.getShort(dopArray, 54);
    _epc = num & 0x3;
  }
}
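The removed constructor above unpacks DOP flags with bit masks and shifts against little-endian shorts. A minimal standalone sketch of that decoding pattern, reusing the same org.apache.poi.util.LittleEndian helper; the buffer size and the sample values below are made up for illustration:

    import org.apache.poi.util.LittleEndian;

    public class DopBitsSketch {
        public static void main(String[] args) {
            byte[] dop = new byte[56];                      // fabricated DOP buffer
            dop[0] = 0x61;                                  // 0110_0001
            LittleEndian.putShort(dop, 2, (short) 0x000D);  // fabricated footnote info

            boolean fFacingPages = (dop[0] & 0x1) > 0;      // lowest bit
            int fpc = (dop[0] & 0x60) >> 5;                 // bits 5-6
            short num = LittleEndian.getShort(dop, 2);
            int rncFtn = num & 0x3;                         // low two bits
            int nFtn = (short) (num & 0xfffc) >> 2;         // remaining bits

            // prints: true 3 1 3
            System.out.println(fFacingPages + " " + fpc + " " + rncFtn + " " + nFtn);
        }
    }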
@ -1,322 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

import org.apache.poi.hdf.model.hdftypes.definitions.FIBAbstractType;

/**
 *
 * @author andy
 */
@Deprecated
public final class FileInformationBlock extends FIBAbstractType
{
  /*
  private short field_1_id;
  private short field_2_version; // 101 = Word 6.0 +
  private short field_3_product_version;
  private short field_4_language_stamp;
  private short field_5_unknown;
  private short field_6_options;

  private static final BitField template = BitFieldFactory.getInstance(0x0001);
  private static final BitField glossary = BitFieldFactory.getInstance(0x0002);
  private static final BitField quicksave = BitFieldFactory.getInstance(0x0004);
  private static final BitField haspictr = BitFieldFactory.getInstance(0x0008);
  private static final BitField nquicksaves = BitFieldFactory.getInstance(0x00F0);
  private static final BitField encrypted = BitFieldFactory.getInstance(0x0100);
  private static final BitField tabletype = BitFieldFactory.getInstance(0x0200);
  private static final BitField readonly = BitFieldFactory.getInstance(0x0400);
  private static final BitField writeReservation = BitFieldFactory.getInstance(0x0800);
  private static final BitField extendedCharacter = BitFieldFactory.getInstance(0x1000);
  private static final BitField loadOverride = BitFieldFactory.getInstance(0x2000);
  private static final BitField farEast = BitFieldFactory.getInstance(0x4000);
  private static final BitField crypto = BitFieldFactory.getInstance(0x8000);

  private short field_7_minversion;
  private short field_8_encrypted_key;
  private short field_9_environment; // 0 or 1 - windows or mac
  private short field_10_history;

  private static final BitField history_mac = BitFieldFactory.getInstance(0x01);
  private static final BitField empty_special = BitFieldFactory.getInstance(0x02);
  private static final BitField load_override = BitFieldFactory.getInstance(0x04);
  private static final BitField future_undo = BitFieldFactory.getInstance(0x08);
  private static final BitField w97_saved = BitFieldFactory.getInstance(0x10);
  private static final BitField spare = BitFieldFactory.getInstance(0xfe);

  private short field_11_default_charset;
  private short field_12_default_extcharset;
  private int field_13_offset_first_char;
  private int field_14_offset_last_char;
  private short field_15_count_shorts;

  private short field_16_beg_shorts; //why same offset?

  private short field_16_creator_id;
  private short field_17_revisor_id;
  private short field_18_creator_private;
  private short field_19_revisor_private;

  private short field_20_unused;
  private short field_21_unused;
  private short field_22_unused;
  private short field_23_unused;
  private short field_24_unused;
  private short field_25_unused;
  private short field_26_unused;
  private short field_27_unused;
  private short field_28_unused;

  private short field_29_fareastid;
  private short field_30_count_ints;

  private int field_31_beg_ints; //why same offset?

  private int field_31_last_byte;

  private int field_32_creator_build_date;
  private int field_33_revisor_build_date; */
  /** length of main document text stream*/
  // private int field_34_main_streamlen;
  /**length of footnote subdocument text stream*/
  /* private int field_35_footnote_streamlen;
  private int field_36_header_streamlen;
  private int field_37_macro_streamlen;
  private int field_38_annotation_streamlen;
  private int field_39_endnote_streamlen;
  private int field_40_textbox_streamlen;
  private int field_41_headbox_streamlen; */
  /**offset in table stream of character property bin table*/
  // private int field_42_pointer_to_plc_list_chp; //rename me!
  // private int field_43_first_chp; //rename me
  // private int field_44_count_chps; //rename me
  /**offset in table stream of paragraph property bin */
  /* private int field_45_pointer_to_plc_list_pap; //rename me.
  private int field_46_first_pap; //rename me
  private int field_47_count_paps; //rename me
  private int field_48_pointer_to_plc_list_lvc; //rename me
  private int field_49_first_lvc; //rename me
  private int field_50_count_lvc; //rename me

  private int field_51_unknown;
  private int field_52_unknown; */
  //not sure about this array.
  /*
  private short field_53_fc_lcb_array_size;
  private int field_54_original_stylesheet_offset;
  private int field_55_original_stylesheet_size;
  private int field_56_stylesheet_offset;
  private int field_57_stylesheet_size;
  private int field_58_footnote_ref_offset;
  private int field_59_footnote_ref_size;
  private int field_60_footnote_plc_offset;
  private int field_61_footnote_plc_size;
  private int field_62_annotation_ref_offset;
  private int field_63_annotation_ref_size;
  private int field_64_annotation_plc_offset;
  private int field_65_annotation_plc_size; */
  /** offset in table stream of section descriptor SED PLC*/
  /* private int field_66_section_plc_offset;
  private int field_67_section_plc_size;
  private int field_68_unused;
  private int field_69_unused;
  private int field_70_pheplc_offset;
  private int field_71_pheplc_size;
  private int field_72_glossaryST_offset;
  private int field_73_glossaryST_size;
  private int field_74_glossaryPLC_offset;
  private int field_75_glossaryPLC_size;
  private int field_76_headerPLC_offset;
  private int field_77_headerPLC_size;
  private int field_78_chp_bin_table_offset;
  private int field_79_chp_bin_table_size;
  private int field_80_pap_bin_table_offset;
  private int field_81_pap_bin_table_size;
  private int field_82_sea_plc_offset;
  private int field_83_sea_plc_size;
  private int field_84_fonts_offset;
  private int field_85_fonts_size;
  private int field_86_main_fields_offset;
  private int field_87_main_fields_size;
  private int field_88_header_fields_offset;
  private int field_89_header_fields_size;
  private int field_90_footnote_fields_offset;
  private int field_91_footnote_fields_size;
  private int field_92_ann_fields_offset;
  private int field_93_ann_fields_size;
  private int field_94_unused;
  private int field_95_unused;
  private int field_96_bookmark_names_offset;
  private int field_97_bookmark_names_size;
  private int field_98_bookmark_offsets_offset;
  private int field_99_bookmark_offsets_size;
  private int field_100_macros_offset;
  private int field_101_macros_size;
  private int field_102_unused;
  private int field_103_unused;
  private int field_104_unused;
  private int field_105_unused;
  private int field_106_printer_offset;
  private int field_107_printer_size;
  private int field_108_printer_portrait_offset;
  private int field_109_printer_portrait_size;
  private int field_110_printer_landscape_offset;
  private int field_111_printer_landscape_size;
  private int field_112_wss_offset;
  private int field_113_wss_size;
  private int field_114_DOP_offset;
  private int field_115_DOP_size;
  private int field_116_sttbfassoc_offset;
  private int field_117_sttbfassoc_size; */
  /**offset in table stream of beginning of information for complex files.
   * Also, this is the beginning of the Text piece table*/ /*
  private int field_118_textPieceTable_offset;
  private int field_119_textPieceTable_size;
  private int field_199_list_format_offset;
  private int field_200_list_format_size;
  private int field_201_list_format_override_offset;
  private int field_202_list_format_override_size;
  */

  /** Creates a new instance of FileInformationBlock */
  public FileInformationBlock(byte[] mainDocument)
  {
    fillFields(mainDocument, (short)0, (short)0);
    /* field_1_id = LittleEndian.getShort(mainDocument, 0);
    field_2_version = LittleEndian.getShort(mainDocument, 0x2); // 101 = Word 6.0 +
    field_3_product_version = LittleEndian.getShort(mainDocument, 0x4);
    field_4_language_stamp = LittleEndian.getShort(mainDocument, 0x6);
    field_5_unknown = LittleEndian.getShort(mainDocument, 0x8);
    field_6_options = LittleEndian.getShort(mainDocument, 0xa);

    field_13_offset_first_char = LittleEndian.getInt(mainDocument, 0x18);
    field_34_main_streamlen = LittleEndian.getInt(mainDocument, 0x4c);
    field_35_footnote_streamlen = LittleEndian.getInt(mainDocument, 0x50);

    field_56_stylesheet_offset = LittleEndian.getInt(mainDocument, 0xa2);
    field_57_stylesheet_size = LittleEndian.getInt(mainDocument, 0xa6);
    field_66_section_plc_offset = LittleEndian.getInt(mainDocument, 0xca);
    field_67_section_plc_size = LittleEndian.getInt(mainDocument, 0xce);

    field_78_chp_bin_table_offset = LittleEndian.getInt(mainDocument, 0xfa);
    field_79_chp_bin_table_size = LittleEndian.getInt(mainDocument, 0xfe);
    field_80_pap_bin_table_offset = LittleEndian.getInt(mainDocument, 0x102);
    field_81_pap_bin_table_size = LittleEndian.getInt(mainDocument, 0x106);

    field_84_fonts_offset = LittleEndian.getInt(mainDocument, 0x112);
    field_85_fonts_size = LittleEndian.getInt(mainDocument, 0x116);

    field_114_DOP_offset = LittleEndian.getInt(mainDocument, 0x192);
    field_115_DOP_size = LittleEndian.getInt(mainDocument, 0x196);
    field_118_textPieceTable_offset = LittleEndian.getInt(mainDocument, 0x1a2);

    field_199_list_format_offset = LittleEndian.getInt(mainDocument, 0x2e2);
    field_200_list_format_size = LittleEndian.getInt(mainDocument, 0x2e6);
    field_201_list_format_override_offset = LittleEndian.getInt(mainDocument, 0x2ea);
    field_202_list_format_override_size= LittleEndian.getInt(mainDocument, 0x2ee);*/
  }
  /*
  public boolean useTable1()
  {
    return tabletype.setShort(field_6_options) > 0;
  }
  public int getFirstCharOffset()
  {
    return field_13_offset_first_char;
  }
  public int getStshOffset()
  {
    return field_56_stylesheet_offset;
  }
  public int getStshSize()
  {
    return field_57_stylesheet_size;
  }
  public int getSectionDescriptorOffset()
  {
    return field_66_section_plc_offset;
  }
  public int getSectionDescriptorSize()
  {
    return field_67_section_plc_size;
  }
  public int getChpBinTableOffset()
  {
    return field_78_chp_bin_table_offset;
  }
  public int getChpBinTableSize()
  {
    return field_79_chp_bin_table_size;
  }
  public int getPapBinTableOffset()
  {
    return field_80_pap_bin_table_offset;
  }
  public int getPapBinTableSize()
  {
    return field_81_pap_bin_table_size;
  }
  public int getFontsOffset()
  {
    return field_84_fonts_offset;
  }
  public int getFontsSize()
  {
    return field_85_fonts_size;
  }
  public int getDOPOffset()
  {
    return field_114_DOP_offset;
  }
  public int getDOPSize()
  {
    return field_115_DOP_size;
  }
  public int getComplexOffset()
  {
    return field_118_textPieceTable_offset;
  }
  public int getLSTOffset()
  {
    return field_199_list_format_offset;
  }
  public int getLSTSize()
  {
    return field_200_list_format_size;
  }
  public int getLFOOffset()
  {
    return field_201_list_format_override_offset;
  }
  public int getLFOSize()
  {
    return field_202_list_format_override_size;
  }
  */
}
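The only live code left above delegates to fillFields(); the commented-out block records the offsets the old hand-rolled parser used. A hedged sketch of that style of access, reading the first two FIB fields at offsets 0x0 and 0x2 as in the commented-out constructor (the buffer and the values are fabricated, and the field meanings are taken from the comments above rather than verified against the file format spec):

    import org.apache.poi.util.LittleEndian;

    public class FibHeaderSketch {
        public static void main(String[] args) {
            byte[] mainDocument = new byte[4];                        // fabricated FIB prefix
            LittleEndian.putShort(mainDocument, 0x0, (short) 0xA5EC); // made-up id value
            LittleEndian.putShort(mainDocument, 0x2, (short) 101);    // "101 = Word 6.0 +"

            short id      = LittleEndian.getShort(mainDocument, 0x0); // field_1_id
            short version = LittleEndian.getShort(mainDocument, 0x2); // field_2_version
            System.out.printf("id=0x%04X version=%d%n", id, version);
        }
    }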
@ -1,60 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

import org.apache.poi.util.LittleEndian;

/**
 * Comment me
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class FontTable implements HDFType
{
  String[] fontNames;

  public FontTable(byte[] fontTable)
  {
    int size = LittleEndian.getShort(fontTable, 0);
    fontNames = new String[size];

    int currentIndex = 4;
    for(int x = 0; x < size; x++)
    {
      byte ffnLength = fontTable[currentIndex];

      int nameOffset = currentIndex + 40;
      StringBuffer nameBuf = new StringBuffer();
      //char ch = Utils.getUnicodeCharacter(fontTable, nameOffset);
      char ch = (char)LittleEndian.getShort(fontTable, nameOffset);
      while(ch != '\0')
      {
        nameBuf.append(ch);
        nameOffset += 2;
        ch = (char)LittleEndian.getShort(fontTable, nameOffset);
      }
      fontNames[x] = nameBuf.toString();
      currentIndex += ffnLength + 1;
    }
  }

  public String getFont(int index)
  {
    return fontNames[index];
  }
}
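The removed constructor walks each FFN record and reads the font name as a null-terminated, little-endian UTF-16 string. A standalone sketch of just that decoding loop (the byte array is fabricated):

    import org.apache.poi.util.LittleEndian;

    public class FontNameSketch {
        public static void main(String[] args) {
            // "Arial" encoded as UTF-16LE with a trailing null code unit
            byte[] data = { 'A', 0, 'r', 0, 'i', 0, 'a', 0, 'l', 0, 0, 0 };
            StringBuilder name = new StringBuilder();
            int offset = 0;
            char ch = (char) LittleEndian.getShort(data, offset);
            while (ch != '\0') {
                name.append(ch);
                offset += 2;                                   // two bytes per code unit
                ch = (char) LittleEndian.getShort(data, offset);
            }
            System.out.println(name);                          // Arial
        }
    }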
@ -1,84 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

import org.apache.poi.util.LittleEndian;

/**
 * Represents an FKP data structure. This data structure is used to store the
 * grpprls of the paragraph and character properties of the document. A grpprl
 * is a list of sprms (decompression operations) to perform on a parent style.
 *
 * The style properties for paragraph and character runs are stored in FKPs.
 * There are PAP FKPs for paragraph properties and CHP FKPs for character run
 * properties. The first part of the FKP for both CHP and PAP FKPs consists of
 * an array of 4-byte int offsets into the main stream for that paragraph's or
 * character run's text. The ending offset is the next value in the array. For
 * example, if an FKP has X paragraphs stored in it, then there are (X + 1)
 * 4-byte ints in the beginning array. The number X is determined by the last
 * byte in a 512-byte FKP.
 *
 * CHP and PAP FKPs also store the compressed styles (grpprl) that correspond
 * to the offsets on the front of the FKP. The offset of the grpprls is
 * determined differently for CHP FKPs and PAP FKPs.
 *
 * @author Ryan Ackley
 */
@Deprecated
public abstract class FormattedDiskPage
{
  protected byte[] _fkp;
  protected int _crun;

  /**
   * Uses a 512-byte array to create a FKP
   */
  public FormattedDiskPage(byte[] fkp)
  {
    _crun = LittleEndian.getUnsignedByte(fkp, 511);
    _fkp = fkp;
  }

  /**
   * Used to get a text offset corresponding to a grpprl in this FKP.
   * @param index The index of the property in this FKP
   * @return an int representing an offset in the "WordDocument" stream
   */
  public int getStart(int index)
  {
    return LittleEndian.getInt(_fkp, (index * 4));
  }

  /**
   * Used to get the end of the text corresponding to a grpprl in this FKP.
   * @param index The index of the property in this FKP.
   * @return an int representing an offset in the "WordDocument" stream
   */
  public int getEnd(int index)
  {
    return LittleEndian.getInt(_fkp, ((index + 1) * 4));
  }

  /**
   * Used to get the total number of grpprls stored in this FKP
   * @return The number of grpprls in this FKP
   */
  public int size()
  {
    return _crun;
  }

  public abstract byte[] getGrpprl(int index);
}
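The Javadoc above describes the 512-byte FKP layout: (crun + 1) four-byte text offsets at the front and the run count in the last byte. A minimal sketch that fabricates such a page and reads it back with the same LittleEndian calls the removed accessors used:

    import org.apache.poi.util.LittleEndian;

    public class FkpLayoutSketch {
        public static void main(String[] args) {
            byte[] fkp = new byte[512];              // fabricated FKP
            LittleEndian.putInt(fkp, 0, 1024);       // start of run 0 in the WordDocument stream
            LittleEndian.putInt(fkp, 4, 1100);       // end of run 0 / start of run 1
            LittleEndian.putInt(fkp, 8, 1250);       // end of run 1
            fkp[511] = 2;                            // crun

            int crun = LittleEndian.getUnsignedByte(fkp, 511);
            for (int i = 0; i < crun; i++) {
                int start = LittleEndian.getInt(fkp, i * 4);
                int end   = LittleEndian.getInt(fkp, (i + 1) * 4);
                System.out.println("run " + i + ": [" + start + ", " + end + ")");
            }
        }
    }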
@ -1,28 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

/**
 *
 * @author andy
 */
@Deprecated
public interface HDFType {

}
@ -1,57 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

/**
 * Comment me
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class HeaderFooter
{
  public static final int HEADER_EVEN = 1;
  public static final int HEADER_ODD = 2;
  public static final int FOOTER_EVEN = 3;
  public static final int FOOTER_ODD = 4;
  public static final int HEADER_FIRST = 5;
  public static final int FOOTER_FIRST = 6;

  private int _type;
  private int _start;
  private int _end;

  public HeaderFooter(int type, int startFC, int endFC)
  {
    _type = type;
    _start = startFC;
    _end = endFC;
  }

  public int getStart()
  {
    return _start;
  }

  public int getEnd()
  {
    return _end;
  }

  public boolean isEmpty()
  {
    return _start - _end == 0;
  }
}
@ -1,36 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

/**
 * Comment me
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class LFO
{
  int _lsid;
  int _clfolvl;
  LFOLVL[] _levels;

  public LFO()
  {
  }
}
@ -1,37 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

/**
 * Comment me
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class LFOLVL
{
  int _iStartAt;
  int _ilvl;
  boolean _fStartAt;
  boolean _fFormatting;
  LVL _override;

  public LFOLVL()
  {
  }
}
@ -1,37 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

/**
 * Comment me
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class LST
{
  int _lsid;
  int _tplc;
  byte[] _rgistd = new byte[18];
  boolean _fSimpleList;
  LVL[] _levels;

  public LST()
  {
  }
}
@ -1,65 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

/**
 * Comment me
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class LVL
{
  public int _iStartAt;
  public byte _nfc;
  byte _jc;
  boolean _fLegal;
  boolean _fNoRestart;
  boolean _fPrev;
  boolean _fPrevSpace;
  boolean _fWord6;
  public byte[] _rgbxchNums = new byte[9];
  public byte _ixchFollow;
  public int _dxaSpace;
  public int _dxaIndent;
  public byte[] _chpx;
  public byte[] _papx;
  public char[] _xst;
  public short _istd;

  //byte _cbGrpprlChpx;
  //byte _cbGrpprlPapx;

  public LVL()
  {
  }

  public Object clone()
  {
    LVL obj = null;
    try
    {
      obj = (LVL)super.clone();
    }
    catch(Exception e)
    {
      e.printStackTrace();
    }
    return obj;
  }
}
@ -1,208 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

import java.util.*;

import org.apache.poi.hdf.extractor.*;

/**
 * Comment me
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class ListTables implements HDFType
{

  LFO[] _pllfo;
  Hashtable _lists = new Hashtable();

  public ListTables(byte[] plcflst, byte[] plflfo)
  {
    initLST(plcflst);
    initLFO(plflfo);
  }

  public LVL getLevel(int list, int level)
  {
    LFO override = _pllfo[list - 1];

    for(int x = 0; x < override._clfolvl; x++)
    {
      if(override._levels[x]._ilvl == level)
      {
        LFOLVL lfolvl = override._levels[x];
        if(lfolvl._fFormatting)
        {
          LST lst = (LST)_lists.get(Integer.valueOf(override._lsid));
          LVL lvl = lfolvl._override;
          lvl._istd = Utils.convertBytesToShort(lst._rgistd, level * 2);
          return lvl;
        }
        else if(lfolvl._fStartAt)
        {
          LST lst = (LST)_lists.get(Integer.valueOf(override._lsid));
          LVL lvl = lst._levels[level];
          LVL newLvl = (LVL)lvl.clone();
          newLvl._istd = Utils.convertBytesToShort(lst._rgistd, level * 2);
          newLvl._iStartAt = lfolvl._iStartAt;
          return newLvl;
        }
      }
    }

    LST lst = (LST)_lists.get(Integer.valueOf(override._lsid));
    LVL lvl = lst._levels[level];
    lvl._istd = Utils.convertBytesToShort(lst._rgistd, level * 2);
    return lvl;
  }

  private void initLST(byte[] plcflst)
  {
    short length = Utils.convertBytesToShort(plcflst, 0);
    int nextLevelOffset = 0;
    //LST[] lstArray = new LST[length];
    for(int x = 0; x < length; x++)
    {
      LST lst = new LST();
      lst._lsid = Utils.convertBytesToInt(plcflst, 2 + (x * 28));
      lst._tplc = Utils.convertBytesToInt(plcflst, 2 + 4 + (x * 28));
      System.arraycopy(plcflst, 2 + 8 + (x * 28), lst._rgistd, 0, 18);
      byte code = plcflst[2 + 26 + (x * 28)];
      lst._fSimpleList = StyleSheet.getFlag(code & 0x01);
      //lstArray[x] = lst;
      _lists.put(Integer.valueOf(lst._lsid), lst);

      if(lst._fSimpleList)
      {
        lst._levels = new LVL[1];
      }
      else
      {
        lst._levels = new LVL[9];
      }

      for(int y = 0; y < lst._levels.length; y++)
      {
        int offset = 2 + (length * 28) + nextLevelOffset;
        lst._levels[y] = new LVL();
        nextLevelOffset += createLVL(plcflst, offset, lst._levels[y]);
      }
    }
  }

  private void initLFO(byte[] plflfo)
  {
    int lfoSize = Utils.convertBytesToInt(plflfo, 0);
    _pllfo = new LFO[lfoSize];
    for(int x = 0; x < lfoSize; x++)
    {
      LFO nextLFO = new LFO();
      nextLFO._lsid = Utils.convertBytesToInt(plflfo, 4 + (x * 16));
      nextLFO._clfolvl = plflfo[4 + 12 + (x * 16)];
      nextLFO._levels = new LFOLVL[nextLFO._clfolvl];
      _pllfo[x] = nextLFO;
    }

    int lfolvlOffset = (lfoSize * 16) + 4;
    int lvlOffset = 0;
    int lfolvlNum = 0;
    for(int x = 0; x < lfoSize; x++)
    {
      if (_pllfo[x]._clfolvl == 0)
        // If LFO._clfolvl is 0, then it appears that Word writes
        // out a LFOLVL anyway - however, it's all 0xff. We need
        // to skip over it.
        lfolvlNum++;
      else
      {
        for(int y = 0; y < _pllfo[x]._clfolvl; y++)
        {
          int offset = lfolvlOffset + (lfolvlNum * 8) + lvlOffset;
          LFOLVL lfolvl = new LFOLVL();
          lfolvl._iStartAt = Utils.convertBytesToInt(plflfo, offset);
          lfolvl._ilvl = Utils.convertBytesToInt(plflfo, offset + 4);
          lfolvl._fStartAt = StyleSheet.getFlag(lfolvl._ilvl & 0x10);
          lfolvl._fFormatting = StyleSheet.getFlag(lfolvl._ilvl & 0x20);
          lfolvl._ilvl = (lfolvl._ilvl & (byte)0x0f);
          lfolvlNum++;

          if(lfolvl._fFormatting)
          {
            // The size of a LFOLVL is 8 bytes.
            offset = lfolvlOffset + (lfolvlNum * 8) + lvlOffset;
            lfolvl._override = new LVL();
            lvlOffset += createLVL(plflfo, offset, lfolvl._override);
          }
          _pllfo[x]._levels[y] = lfolvl;
        }
      }
    }
  }

  private int createLVL(byte[] data, int offset, LVL lvl)
  {
    int startingOffset = offset;
    lvl._iStartAt = Utils.convertBytesToInt(data, offset);
    offset += 4;
    lvl._nfc = data[offset++];
    byte code = data[offset++];
    lvl._jc = (byte)(code & 0x03);
    lvl._fLegal = StyleSheet.getFlag(code & 0x04);
    lvl._fNoRestart = StyleSheet.getFlag(code & 0x08);
    lvl._fPrev = StyleSheet.getFlag(code & 0x10);
    lvl._fPrevSpace = StyleSheet.getFlag(code & 0x20);
    lvl._fWord6 = StyleSheet.getFlag(code & 0x40);

    // rgbxchNums - This array should be zero terminated unless it is full
    // (all 9 levels full).
    System.arraycopy(data, offset, lvl._rgbxchNums, 0, 9);
    offset += 9;

    lvl._ixchFollow = data[offset++];

    if (lvl._fWord6)
    {
      lvl._dxaSpace = Utils.convertBytesToInt(data, offset);
      lvl._dxaIndent = Utils.convertBytesToInt(data, offset + 4);
    }
    offset += 8;

    int chpxSize = data[offset++];
    int papxSize = data[offset++];
    lvl._chpx = new byte[chpxSize];
    lvl._papx = new byte[papxSize];

    System.arraycopy(data, offset, lvl._chpx, 0, chpxSize);
    System.arraycopy(data, offset + chpxSize, lvl._papx, 0, papxSize);

    offset += papxSize + chpxSize + 2; //don't forget to skip reserved word
    int xstSize = Utils.convertBytesToShort(data, offset);
    offset += 2;
    lvl._xst = new char[xstSize];

    for(int x = 0; x < xstSize; x++)
    {
      lvl._xst[x] = (char)Utils.convertBytesToShort(data, offset + (x * 2));
    }
    return offset + (xstSize * 2) - startingOffset;
  }
}
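The offset arithmetic in the removed initLST()/initLFO() methods assumes fixed record sizes: a 2-byte count followed by 28-byte LST headers, a 4-byte count followed by 16-byte LFO records, and 8-byte LFOLVL entries. A small standalone sketch of that arithmetic, with made-up counts:

    public class ListTableOffsetsSketch {
        public static void main(String[] args) {
            int lstCount = 3;   // made-up number of LST records
            int lfoCount = 2;   // made-up number of LFO records

            // matches "2 + (length * 28)" in the removed initLST()
            int firstLvlOffset = 2 + lstCount * 28;        // 86

            // matches "(lfoSize * 16) + 4" in the removed initLFO()
            int firstLfoLvlOffset = 4 + lfoCount * 16;     // 36

            System.out.println(firstLvlOffset + " " + firstLfoLvlOffset);
        }
    }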
@ -1,75 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

import org.apache.poi.util.LittleEndian;

/**
 * Represents a PAP FKP. The style properties for paragraph and character runs
 * are stored in FKPs. There are PAP FKPs for paragraph properties and CHP FKPs
 * for character run properties. The first part of the FKP for both CHP and PAP
 * FKPs consists of an array of 4-byte int offsets into the main stream for that
 * paragraph's or character run's text. The ending offset is the next value in
 * the array. For example, if an FKP has X paragraphs stored in it, then there
 * are (X + 1) 4-byte ints in the beginning array. The number X is determined
 * by the last byte in a 512-byte FKP.
 *
 * CHP and PAP FKPs also store the compressed styles (grpprl) that correspond
 * to the offsets on the front of the FKP. The offset of the grpprls is
 * determined differently for CHP FKPs and PAP FKPs.
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class PAPFormattedDiskPage extends FormattedDiskPage
{

  /**
   * Creates a PAPFormattedDiskPage from a 512 byte array
   *
   * @param fkp a 512 byte array.
   */
  public PAPFormattedDiskPage(byte[] fkp)
  {
    super(fkp);
  }

  /**
   * Gets the papx for the paragraph at index in this FKP.
   *
   * @param index The index of the papx to get.
   * @return a papx grpprl.
   */
  public byte[] getGrpprl(int index)
  {
    int papxOffset = 2 * LittleEndian.getUnsignedByte(_fkp, ((_crun + 1) * 4) + (index * 13));
    int size = 2 * LittleEndian.getUnsignedByte(_fkp, papxOffset);
    if(size == 0)
    {
      size = 2 * LittleEndian.getUnsignedByte(_fkp, ++papxOffset);
    }
    else
    {
      size--;
    }

    byte[] papx = new byte[size];
    System.arraycopy(_fkp, ++papxOffset, papx, 0, size);
    return papx;
  }
}
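getGrpprl() above doubles the stored byte, which suggests the BX entry holds a word-granular offset into the 512-byte page, and each BX entry sits 13 bytes apart after the boundary offsets. A fabricated round-trip of that offset math (values made up; only the arithmetic mirrors the removed code):

    import org.apache.poi.util.LittleEndian;

    public class PapxOffsetSketch {
        public static void main(String[] args) {
            byte[] fkp = new byte[512];               // fabricated PAP FKP
            int crun = 1;
            fkp[511] = (byte) crun;
            int bxStart = (crun + 1) * 4;             // BX array begins after the offsets
            fkp[bxStart] = (byte) (500 / 2);          // papx for run 0 lives at byte 500

            int index = 0;
            int papxOffset = 2 * LittleEndian.getUnsignedByte(fkp, bxStart + index * 13);
            System.out.println(papxOffset);           // 500
        }
    }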
@ -1,39 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

/**
 * Comment me
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class PapxNode extends PropertyNode
{

  public PapxNode(int fcStart, int fcEnd, byte[] papx)
  {
    super(fcStart, fcEnd, papx);
  }

  public byte[] getPapx()
  {
    return super.getGrpprl();
  }
}
@ -1,96 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

import org.apache.poi.hdf.model.hdftypes.definitions.PAPAbstractType;

/**
 * Comment me
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class ParagraphProperties extends PAPAbstractType implements Cloneable
{

  public ParagraphProperties()
  {
    short[] lspd = new short[2];
    setFWidowControl((byte)1);
    //lspd[0] = 240;
    lspd[1] = 1;
    setIlvl((byte)9);

    setLspd(lspd);
    setBrcBar(new short[2]);
    setBrcBottom(new short[2]);
    setBrcLeft(new short[2]);
    setBrcBetween(new short[2]);
    setBrcRight(new short[2]);
    setBrcTop(new short[2]);
    setPhe(new byte[12]);
    setAnld(new byte[84]);
    setDttmPropRMark(new byte[4]);
    setNumrm(new byte[8]);
  }

  public Object clone() throws CloneNotSupportedException
  {
    ParagraphProperties clone = (ParagraphProperties)super.clone();

    short[] brcBar = new short[2];
    short[] brcBottom = new short[2];
    short[] brcLeft = new short[2];
    short[] brcBetween = new short[2];
    short[] brcRight = new short[2];
    short[] brcTop = new short[2];
    short[] lspd = new short[2];
    byte[] phe = new byte[12];
    byte[] anld = new byte[84];
    byte[] dttmPropRMark = new byte[4];
    byte[] numrm = new byte[8];

    System.arraycopy(getBrcBar(), 0, brcBar, 0, 2);
    System.arraycopy(getBrcBottom(), 0, brcBottom, 0, 2);
    System.arraycopy(getBrcLeft(), 0, brcLeft, 0, 2);
    System.arraycopy(getBrcBetween(), 0, brcBetween, 0, 2);
    System.arraycopy(getBrcRight(), 0, brcRight, 0, 2);
    System.arraycopy(getBrcTop(), 0, brcTop, 0, 2);
    System.arraycopy(getLspd(), 0, lspd, 0, 2);
    System.arraycopy(getPhe(), 0, phe, 0, 12);
    System.arraycopy(getAnld(), 0, anld, 0, 84);
    System.arraycopy(getDttmPropRMark(), 0, dttmPropRMark, 0, 4);
    System.arraycopy(getNumrm(), 0, numrm, 0, 8);

    clone.setBrcBar(brcBar);
    clone.setBrcBottom(brcBottom);
    clone.setBrcLeft(brcLeft);
    clone.setBrcBetween(brcBetween);
    clone.setBrcRight(brcRight);
    clone.setBrcTop(brcTop);
    clone.setLspd(lspd);
    clone.setPhe(phe);
    clone.setAnld(anld);
    clone.setDttmPropRMark(dttmPropRMark);
    clone.setNumrm(numrm);
    return clone;
  }
}
@ -1,77 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

/**
 * A common data structure in a Word file. Contains an array of 4-byte ints in
 * the front that relate to an array of arbitrary data structures in the back.
 *
 * This class acts more like a pointer, in the sense that it doesn't store any
 * data. It only provides convenience methods for accessing a particular
 * PlexOfCps.
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class PlexOfCps
{
  private int _count;
  private int _offset;
  private int _sizeOfStruct;

  /**
   * Constructor
   *
   * @param size The size in bytes of this PlexOfCps
   * @param sizeOfStruct The size of the data structure type stored in
   *        this PlexOfCps.
   */
  public PlexOfCps(int size, int sizeOfStruct)
  {
    _count = (size - 4)/(4 + sizeOfStruct);
    _sizeOfStruct = sizeOfStruct;
  }

  public int getIntOffset(int index)
  {
    return index * 4;
  }

  /**
   * Returns the number of data structures in this PlexOfCps.
   *
   * @return The number of data structures in this PlexOfCps
   */
  public int length()
  {
    return _count;
  }

  /**
   * Returns the offset, in bytes, from the beginning of this PlexOfCps to
   * the data structure at index.
   *
   * @param index The index of the data structure.
   *
   * @return The offset, in bytes, from the beginning of this PlexOfCps to
   *         the data structure at index.
   */
  public int getStructOffset(int index)
  {
    return (4 * (_count + 1)) + (_sizeOfStruct * index);
  }
}
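A hedged usage sketch against the pre-removal API (PlexOfCps is deleted by this commit, so this only compiles against the old tree): a 244-byte PLC holding 12-byte structures has (244 - 4) / (4 + 12) = 15 entries, and the fourth structure starts at byte (4 * 16) + (12 * 3) = 100.

    import org.apache.poi.hdf.model.hdftypes.PlexOfCps;

    public class PlexOfCpsSketch {
        public static void main(String[] args) {
            PlexOfCps plex = new PlexOfCps(244, 12);     // sizes are made up
            System.out.println(plex.length());           // 15
            System.out.println(plex.getIntOffset(3));    // 12
            System.out.println(plex.getStructOffset(3)); // 100
        }
    }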
@ -1,84 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

/**
 * Represents a lightweight node in the Trees used to store formatting
 * properties.
 *
 * @author Ryan Ackley
 */
@Deprecated
public abstract class PropertyNode implements Comparable {
  private byte[] _grpprl;
  private int _fcStart;
  private int _fcEnd;

  /**
   * @param fcStart The start of the text for this property.
   * @param fcEnd The end of the text for this property.
   * @param grpprl The property description in compressed form.
   */
  public PropertyNode(int fcStart, int fcEnd, byte[] grpprl)
  {
    _fcStart = fcStart;
    _fcEnd = fcEnd;
    _grpprl = grpprl;
  }

  /**
   * @return The offset of this property's text.
   */
  public int getStart()
  {
    return _fcStart;
  }

  /**
   * @return The offset of the end of this property's text.
   */
  public int getEnd()
  {
    return _fcEnd;
  }

  /**
   * @return This property's grpprl in compressed form.
   */
  protected byte[] getGrpprl()
  {
    return _grpprl;
  }

  /**
   * Used for sorting in collections.
   */
  public int compareTo(Object o)
  {
    int fcEnd = ((PropertyNode)o).getEnd();
    if(_fcEnd == fcEnd)
    {
      return 0;
    }
    else if(_fcEnd < fcEnd)
    {
      return -1;
    }
    else
    {
      return 1;
    }
  }
}
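compareTo() above orders nodes purely by their end offset, which is what lets callers keep CHPX/PAPX nodes in document order. A hedged usage sketch against the pre-removal API, using raw collections to match the era of this code (it does not compile once these classes are removed):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    import org.apache.poi.hdf.model.hdftypes.ChpxNode;
    import org.apache.poi.hdf.model.hdftypes.PropertyNode;

    public class PropertyNodeSortSketch {
        public static void main(String[] args) {
            List runs = new ArrayList();
            runs.add(new ChpxNode(100, 180, new byte[0]));  // later run added first
            runs.add(new ChpxNode(0, 100, new byte[0]));
            Collections.sort(runs);                         // sorts by end offset
            System.out.println(((PropertyNode) runs.get(0)).getEnd());  // 100
        }
    }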
@ -1,101 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

import org.apache.poi.hdf.model.hdftypes.definitions.SEPAbstractType;

/**
 * Comment me
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class SectionProperties extends SEPAbstractType implements HDFType
{
  /*int _index;
  byte _bkc;
  boolean _fTitlePage;
  boolean _fAutoPgn;
  byte _nfcPgn;
  boolean _fUnlocked;
  byte _cnsPgn;
  boolean _fPgnRestart;
  boolean _fEndNote;
  byte _lnc;
  byte _grpfIhdt;
  short _nLnnMod;
  int _dxaLnn;
  short _dxaPgn;
  short _dyaPgn;
  boolean _fLBetween;
  byte _vjc;
  short _dmBinFirst;
  short _dmBinOther;
  short _dmPaperReq;
  short[] _brcTop = new short[2];
  short[] _brcLeft = new short[2];
  short[] _brcBottom = new short[2];
  short[] _brcRight = new short[2];
  boolean _fPropMark;
  int _dxtCharSpace;
  int _dyaLinePitch;
  short _clm;
  byte _dmOrientPage;
  byte _iHeadingPgn;
  short _pgnStart;
  short _lnnMin;
  short _wTextFlow;
  short _pgbProp;
  int _xaPage;
  int _yaPage;
  int _dxaLeft;
  int _dxaRight;
  int _dyaTop;
  int _dyaBottom;
  int _dzaGutter;
  int _dyaHdrTop;
  int _dyaHdrBottom;
  short _ccolM1;
  boolean _fEvenlySpaced;
  int _dxaColumns;
  int[] _rgdxaColumnWidthSpacing;
  byte _dmOrientFirst;
  byte[] _olstAnn;*/

  public SectionProperties()
  {
    setBkc((byte)2);
    setDyaPgn(720);
    setDxaPgn(720);
    setFEndNote(true);
    setFEvenlySpaced(true);
    setXaPage(12240);
    setYaPage(15840);
    setDyaHdrTop(720);
    setDyaHdrBottom(720);
    setDmOrientPage((byte)1);
    setDxaColumns(720);
    setDyaTop(1440);
    setDxaLeft(1800);
    setDyaBottom(1440);
    setDxaRight(1800);
    setPgnStart(1);
  }
}
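The defaults above appear to be in twips (twentieths of a point, 1440 per inch), the usual Word unit; under that assumption they describe a US Letter page with 1in top/bottom and 1.25in side margins, which the arithmetic below checks:

    public class TwipsSketch {
        public static void main(String[] args) {
            System.out.println(12240 / 1440.0 + " x " + 15840 / 1440.0);  // 8.5 x 11.0 (page)
            System.out.println(1800 / 1440.0 + " / " + 1440 / 1440.0);    // 1.25 / 1.0 (margins)
        }
    }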
@ -1,44 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;

/**
 * Comment me
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class SepxNode extends PropertyNode
{

  int _index;

  public SepxNode(int index, int start, int end, byte[] sepx)
  {
    super(start, end, sepx);
  }

  public byte[] getSepx()
  {
    return getGrpprl();
  }

  public int compareTo(Object obj) {
    return 0;
  }
}
@ -1,132 +0,0 @@
package org.apache.poi.hdf.model.hdftypes;
|
||||
|
||||
import org.apache.poi.util.LittleEndian;
|
||||
/**
|
||||
* Comment me
|
||||
*
|
||||
* @author Ryan Ackley
|
||||
*/
|
||||
@Deprecated
|
||||
public final class StyleDescription implements HDFType
|
||||
{
|
||||
|
||||
private static int PARAGRAPH_STYLE = 1;
|
||||
private static int CHARACTER_STYLE = 2;
|
||||
|
||||
int _baseStyleIndex;
|
||||
int _styleTypeCode;
|
||||
int _numUPX;
|
||||
byte[] _papx;
|
||||
byte[] _chpx;
|
||||
ParagraphProperties _pap;
|
||||
CharacterProperties _chp;
|
||||
|
||||
public StyleDescription()
|
||||
{
|
||||
_pap = new ParagraphProperties();
|
||||
_chp = new CharacterProperties();
|
||||
}
|
||||
public StyleDescription(byte[] std, int baseLength, boolean word9)
|
||||
{
|
||||
int infoShort = LittleEndian.getShort(std, 2);
|
||||
_styleTypeCode = (infoShort & 0xf);
|
||||
_baseStyleIndex = (infoShort & 0xfff0) >> 4;
|
||||
|
||||
infoShort = LittleEndian.getShort(std, 4);
|
||||
_numUPX = infoShort & 0xf;
|
||||
|
||||
//first byte(s) of variable length section of std is the length of the
|
||||
//style name and aliases string
|
||||
int nameLength = 0;
|
||||
int multiplier = 1;
|
||||
if(word9)
|
||||
{
|
||||
nameLength = LittleEndian.getShort(std, baseLength);
|
||||
multiplier = 2;
|
||||
}
|
||||
else
|
||||
{
|
||||
nameLength = std[baseLength];
|
||||
}
|
||||
//2 bytes for length, length then null terminator.
|
||||
int grupxStart = multiplier + ((nameLength + 1) * multiplier) + baseLength;
|
||||
|
||||
int offset = 0;
|
||||
for(int x = 0; x < _numUPX; x++)
|
||||
{
|
||||
int upxSize = LittleEndian.getShort(std, grupxStart + offset);
|
||||
if(_styleTypeCode == PARAGRAPH_STYLE)
|
||||
{
|
||||
if(x == 0)
|
||||
{
|
||||
_papx = new byte[upxSize];
|
||||
System.arraycopy(std, grupxStart + offset + 2, _papx, 0, upxSize);
|
||||
}
|
||||
else if(x == 1)
|
||||
{
|
||||
_chpx = new byte[upxSize];
|
||||
System.arraycopy(std, grupxStart + offset + 2, _chpx, 0, upxSize);
|
||||
}
|
||||
}
|
||||
else if(_styleTypeCode == CHARACTER_STYLE && x == 0)
|
||||
{
|
||||
_chpx = new byte[upxSize];
|
||||
System.arraycopy(std, grupxStart + offset + 2, _chpx, 0, upxSize);
|
||||
}
|
||||
|
||||
if(upxSize % 2 == 1)
|
||||
{
|
||||
++upxSize;
|
||||
}
|
||||
offset += 2 + upxSize;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
public int getBaseStyle()
|
||||
{
|
||||
return _baseStyleIndex;
|
||||
}
|
||||
public byte[] getCHPX()
|
||||
{
|
||||
return _chpx;
|
||||
}
|
||||
public byte[] getPAPX()
|
||||
{
|
||||
return _papx;
|
||||
}
|
||||
public ParagraphProperties getPAP()
|
||||
{
|
||||
return _pap;
|
||||
}
|
||||
public CharacterProperties getCHP()
|
||||
{
|
||||
return _chp;
|
||||
}
|
||||
public void setPAP(ParagraphProperties pap)
|
||||
{
|
||||
_pap = pap;
|
||||
}
|
||||
public void setCHP(CharacterProperties chp)
|
||||
{
|
||||
_chp = chp;
|
||||
}
|
||||
}
|
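The StyleDescription constructor above reads the STD header with LittleEndian: the short at offset 2 carries the style type in its low nibble and the base style index in the remaining bits, and the short at offset 4 carries the UPX count. A hedged sketch of that decoding on made-up bytes; the demo class name and sample array are invented, only LittleEndian and the masks come from the code above:

import org.apache.poi.util.LittleEndian;

// Decodes an STD header the same way the removed StyleDescription constructor does.
public final class StdHeaderDemo {
    public static void main(String[] args) {
        // Invented sample: first short unused here, info short = 0x0041, UPX short = 0x0002
        byte[] std = { 0x00, 0x00, 0x41, 0x00, 0x02, 0x00 };

        int infoShort = LittleEndian.getShort(std, 2);
        int styleTypeCode = infoShort & 0xf;             // 1 = paragraph style, 2 = character style
        int baseStyleIndex = (infoShort & 0xfff0) >> 4;  // index of the style this one is based on

        int numUPX = LittleEndian.getShort(std, 4) & 0xf;

        System.out.println("type=" + styleTypeCode
                + " base=" + baseStyleIndex + " numUPX=" + numUPX);  // type=1 base=4 numUPX=2
    }
}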
File diff suppressed because it is too large
@ -1,80 +0,0 @@
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.hdf.model.hdftypes;
|
||||
|
||||
import org.apache.poi.hdf.model.hdftypes.definitions.TCAbstractType;
|
||||
import org.apache.poi.util.LittleEndian;
|
||||
/**
|
||||
* Comment me
|
||||
*
|
||||
* @author Ryan Ackley
|
||||
*/
|
||||
@Deprecated
|
||||
public final class TableCellDescriptor extends TCAbstractType implements HDFType
|
||||
{
|
||||
|
||||
/*boolean _fFirstMerged;
|
||||
boolean _fMerged;
|
||||
boolean _fVertical;
|
||||
boolean _fBackward;
|
||||
boolean _fRotateFont;
|
||||
boolean _fVertMerge;
|
||||
boolean _fVertRestart;
|
||||
short _vertAlign;
|
||||
short[] _brcTop = new short[2];
|
||||
short[] _brcLeft = new short[2];
|
||||
short[] _brcBottom = new short[2];
|
||||
short[] _brcRight = new short [2];*/
|
||||
|
||||
public TableCellDescriptor()
|
||||
{
|
||||
}
|
||||
static TableCellDescriptor convertBytesToTC(byte[] array, int offset)
|
||||
{
|
||||
TableCellDescriptor tc = new TableCellDescriptor();
|
||||
int rgf = LittleEndian.getShort(array, offset);
|
||||
tc.setFFirstMerged((rgf & 0x0001) > 0);
|
||||
tc.setFMerged((rgf & 0x0002) > 0);
|
||||
tc.setFVertical((rgf & 0x0004) > 0);
|
||||
tc.setFBackward((rgf & 0x0008) > 0);
|
||||
tc.setFRotateFont((rgf & 0x0010) > 0);
|
||||
tc.setFVertMerge((rgf & 0x0020) > 0);
|
||||
tc.setFVertRestart((rgf & 0x0040) > 0);
|
||||
tc.setVertAlign((byte)((rgf & 0x0180) >> 7));
|
||||
|
||||
short[] brcTop = new short[2];
|
||||
short[] brcLeft = new short[2];
|
||||
short[] brcBottom = new short[2];
|
||||
short[] brcRight = new short[2];
|
||||
|
||||
brcTop[0] = LittleEndian.getShort(array, offset + 4);
|
||||
brcTop[1] = LittleEndian.getShort(array, offset + 6);
|
||||
|
||||
brcLeft[0] = LittleEndian.getShort(array, offset + 8);
|
||||
brcLeft[1] = LittleEndian.getShort(array, offset + 10);
|
||||
|
||||
brcBottom[0] = LittleEndian.getShort(array, offset + 12);
|
||||
brcBottom[1] = LittleEndian.getShort(array, offset + 14);
|
||||
|
||||
brcRight[0] = LittleEndian.getShort(array, offset + 16);
|
||||
brcRight[1] = LittleEndian.getShort(array, offset + 18);
|
||||
|
||||
return tc;
|
||||
}
|
||||
|
||||
}
|
@ -1,35 +0,0 @@
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.hdf.model.hdftypes;

import org.apache.poi.hdf.model.hdftypes.definitions.TAPAbstractType;

/**
 * Comment me
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class TableProperties extends TAPAbstractType
{
  public TableProperties()
  {
  }
}
@ -1,54 +0,0 @@
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.hdf.model.hdftypes;

/**
 * Lightweight representation of a text piece.
 *
 * @author Ryan Ackley
 */
@Deprecated
public final class TextPiece extends PropertyNode implements Comparable
{
  private boolean _usesUnicode;
  private int _length;

  /**
   * @param start Offset in main document stream.
   * @param length The total length of the text in bytes. Note: 1 character
   *        does not necessarily refer to 1 byte.
   * @param unicode true if this text is unicode.
   */
  public TextPiece(int start, int length, boolean unicode)
  {
    super(start, start + length, null);
    _usesUnicode = unicode;
    _length = length;
  }

  /**
   * @return If this text piece uses unicode
   */
  public boolean usesUnicode()
  {
    return _usesUnicode;
  }
}
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -1,328 +0,0 @@
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.hdf.model.hdftypes.definitions;
|
||||
|
||||
import org.apache.poi.hdf.model.hdftypes.HDFType;
|
||||
|
||||
/**
|
||||
* Table Properties.
|
||||
* NOTE: This source is automatically generated please do not modify this file. Either subclass or
|
||||
* remove the record in src/records/definitions.
|
||||
|
||||
* @author S. Ryan Ackley
|
||||
*/
|
||||
@Deprecated
|
||||
public abstract class TAPAbstractType
|
||||
implements HDFType
|
||||
{
|
||||
|
||||
private int field_1_jc;
|
||||
private int field_2_dxaGapHalf;
|
||||
private int field_3_dyaRowHeight;
|
||||
private boolean field_4_fCantSplit;
|
||||
private boolean field_5_fTableHeader;
|
||||
private int field_6_tlp;
|
||||
private short field_7_itcMac;
|
||||
private short[] field_8_rgdxaCenter;
|
||||
private TCAbstractType[] field_9_rgtc;
|
||||
private byte[] field_10_rgshd;
|
||||
private short[] field_11_brcBottom;
|
||||
private short[] field_12_brcTop;
|
||||
private short[] field_13_brcLeft;
|
||||
private short[] field_14_brcRight;
|
||||
private short[] field_15_brcVertical;
|
||||
private short[] field_16_brcHorizontal;
|
||||
|
||||
|
||||
public TAPAbstractType()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Size of record (excluding 4 byte header)
|
||||
*/
|
||||
public int getSize()
|
||||
{
|
||||
return 4 + + 2 + 4 + 4 + 0 + 0 + 4 + 2 + 130 + 0 + 0 + 4 + 4 + 4 + 4 + 4 + 4;
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Get the jc field for the TAP record.
|
||||
*/
|
||||
public int getJc()
|
||||
{
|
||||
return field_1_jc;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the jc field for the TAP record.
|
||||
*/
|
||||
public void setJc(int field_1_jc)
|
||||
{
|
||||
this.field_1_jc = field_1_jc;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the dxaGapHalf field for the TAP record.
|
||||
*/
|
||||
public int getDxaGapHalf()
|
||||
{
|
||||
return field_2_dxaGapHalf;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the dxaGapHalf field for the TAP record.
|
||||
*/
|
||||
public void setDxaGapHalf(int field_2_dxaGapHalf)
|
||||
{
|
||||
this.field_2_dxaGapHalf = field_2_dxaGapHalf;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the dyaRowHeight field for the TAP record.
|
||||
*/
|
||||
public int getDyaRowHeight()
|
||||
{
|
||||
return field_3_dyaRowHeight;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the dyaRowHeight field for the TAP record.
|
||||
*/
|
||||
public void setDyaRowHeight(int field_3_dyaRowHeight)
|
||||
{
|
||||
this.field_3_dyaRowHeight = field_3_dyaRowHeight;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the fCantSplit field for the TAP record.
|
||||
*/
|
||||
public boolean getFCantSplit()
|
||||
{
|
||||
return field_4_fCantSplit;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the fCantSplit field for the TAP record.
|
||||
*/
|
||||
public void setFCantSplit(boolean field_4_fCantSplit)
|
||||
{
|
||||
this.field_4_fCantSplit = field_4_fCantSplit;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the fTableHeader field for the TAP record.
|
||||
*/
|
||||
public boolean getFTableHeader()
|
||||
{
|
||||
return field_5_fTableHeader;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the fTableHeader field for the TAP record.
|
||||
*/
|
||||
public void setFTableHeader(boolean field_5_fTableHeader)
|
||||
{
|
||||
this.field_5_fTableHeader = field_5_fTableHeader;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the tlp field for the TAP record.
|
||||
*/
|
||||
public int getTlp()
|
||||
{
|
||||
return field_6_tlp;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the tlp field for the TAP record.
|
||||
*/
|
||||
public void setTlp(int field_6_tlp)
|
||||
{
|
||||
this.field_6_tlp = field_6_tlp;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the itcMac field for the TAP record.
|
||||
*/
|
||||
public short getItcMac()
|
||||
{
|
||||
return field_7_itcMac;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the itcMac field for the TAP record.
|
||||
*/
|
||||
public void setItcMac(short field_7_itcMac)
|
||||
{
|
||||
this.field_7_itcMac = field_7_itcMac;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the rgdxaCenter field for the TAP record.
|
||||
*/
|
||||
public short[] getRgdxaCenter()
|
||||
{
|
||||
return field_8_rgdxaCenter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the rgdxaCenter field for the TAP record.
|
||||
*/
|
||||
public void setRgdxaCenter(short[] field_8_rgdxaCenter)
|
||||
{
|
||||
this.field_8_rgdxaCenter = field_8_rgdxaCenter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the rgtc field for the TAP record.
|
||||
*/
|
||||
public TCAbstractType[] getRgtc()
|
||||
{
|
||||
return field_9_rgtc;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the rgtc field for the TAP record.
|
||||
*/
|
||||
public void setRgtc(TCAbstractType[] field_9_rgtc)
|
||||
{
|
||||
this.field_9_rgtc = field_9_rgtc;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the rgshd field for the TAP record.
|
||||
*/
|
||||
public byte[] getRgshd()
|
||||
{
|
||||
return field_10_rgshd;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the rgshd field for the TAP record.
|
||||
*/
|
||||
public void setRgshd(byte[] field_10_rgshd)
|
||||
{
|
||||
this.field_10_rgshd = field_10_rgshd;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the brcBottom field for the TAP record.
|
||||
*/
|
||||
public short[] getBrcBottom()
|
||||
{
|
||||
return field_11_brcBottom;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the brcBottom field for the TAP record.
|
||||
*/
|
||||
public void setBrcBottom(short[] field_11_brcBottom)
|
||||
{
|
||||
this.field_11_brcBottom = field_11_brcBottom;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the brcTop field for the TAP record.
|
||||
*/
|
||||
public short[] getBrcTop()
|
||||
{
|
||||
return field_12_brcTop;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the brcTop field for the TAP record.
|
||||
*/
|
||||
public void setBrcTop(short[] field_12_brcTop)
|
||||
{
|
||||
this.field_12_brcTop = field_12_brcTop;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the brcLeft field for the TAP record.
|
||||
*/
|
||||
public short[] getBrcLeft()
|
||||
{
|
||||
return field_13_brcLeft;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the brcLeft field for the TAP record.
|
||||
*/
|
||||
public void setBrcLeft(short[] field_13_brcLeft)
|
||||
{
|
||||
this.field_13_brcLeft = field_13_brcLeft;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the brcRight field for the TAP record.
|
||||
*/
|
||||
public short[] getBrcRight()
|
||||
{
|
||||
return field_14_brcRight;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the brcRight field for the TAP record.
|
||||
*/
|
||||
public void setBrcRight(short[] field_14_brcRight)
|
||||
{
|
||||
this.field_14_brcRight = field_14_brcRight;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the brcVertical field for the TAP record.
|
||||
*/
|
||||
public short[] getBrcVertical()
|
||||
{
|
||||
return field_15_brcVertical;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the brcVertical field for the TAP record.
|
||||
*/
|
||||
public void setBrcVertical(short[] field_15_brcVertical)
|
||||
{
|
||||
this.field_15_brcVertical = field_15_brcVertical;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the brcHorizontal field for the TAP record.
|
||||
*/
|
||||
public short[] getBrcHorizontal()
|
||||
{
|
||||
return field_16_brcHorizontal;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the brcHorizontal field for the TAP record.
|
||||
*/
|
||||
public void setBrcHorizontal(short[] field_16_brcHorizontal)
|
||||
{
|
||||
this.field_16_brcHorizontal = field_16_brcHorizontal;
|
||||
}
|
||||
|
||||
|
||||
} // END OF CLASS
|
||||
|
||||
|
||||
|
||||
|
@ -1,336 +0,0 @@
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.hdf.model.hdftypes.definitions;
|
||||
|
||||
import org.apache.poi.util.BitField;
|
||||
import org.apache.poi.util.BitFieldFactory;
|
||||
import org.apache.poi.hdf.model.hdftypes.HDFType;
|
||||
|
||||
/**
|
||||
* Table Cell Descriptor.
|
||||
* NOTE: This source is automatically generated please do not modify this file. Either subclass or
|
||||
* remove the record in src/records/definitions.
|
||||
|
||||
* @author S. Ryan Ackley
|
||||
*/
|
||||
@Deprecated
|
||||
public abstract class TCAbstractType
|
||||
implements HDFType
|
||||
{
|
||||
|
||||
private short field_1_rgf;
|
||||
private static BitField fFirstMerged = BitFieldFactory.getInstance(0x0001);
|
||||
private static BitField fMerged = BitFieldFactory.getInstance(0x0002);
|
||||
private static BitField fVertical = BitFieldFactory.getInstance(0x0004);
|
||||
private static BitField fBackward = BitFieldFactory.getInstance(0x0008);
|
||||
private static BitField fRotateFont = BitFieldFactory.getInstance(0x0010);
|
||||
private static BitField fVertMerge = BitFieldFactory.getInstance(0x0020);
|
||||
private static BitField fVertRestart = BitFieldFactory.getInstance(0x0040);
|
||||
private static BitField vertAlign = BitFieldFactory.getInstance(0x0180);
|
||||
private short field_2_unused;
|
||||
private short[] field_3_brcTop;
|
||||
private short[] field_4_brcLeft;
|
||||
private short[] field_5_brcBottom;
|
||||
private short[] field_6_brcRight;
|
||||
|
||||
|
||||
public TCAbstractType()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Size of record (excluding 4 byte header)
|
||||
*/
|
||||
public int getSize()
|
||||
{
|
||||
return 4 + + 2 + 2 + 4 + 4 + 4 + 4;
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Get the rgf field for the TC record.
|
||||
*/
|
||||
public short getRgf()
|
||||
{
|
||||
return field_1_rgf;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the rgf field for the TC record.
|
||||
*/
|
||||
public void setRgf(short field_1_rgf)
|
||||
{
|
||||
this.field_1_rgf = field_1_rgf;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the unused field for the TC record.
|
||||
*/
|
||||
public short getUnused()
|
||||
{
|
||||
return field_2_unused;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the unused field for the TC record.
|
||||
*/
|
||||
public void setUnused(short field_2_unused)
|
||||
{
|
||||
this.field_2_unused = field_2_unused;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the brcTop field for the TC record.
|
||||
*/
|
||||
public short[] getBrcTop()
|
||||
{
|
||||
return field_3_brcTop;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the brcTop field for the TC record.
|
||||
*/
|
||||
public void setBrcTop(short[] field_3_brcTop)
|
||||
{
|
||||
this.field_3_brcTop = field_3_brcTop;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the brcLeft field for the TC record.
|
||||
*/
|
||||
public short[] getBrcLeft()
|
||||
{
|
||||
return field_4_brcLeft;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the brcLeft field for the TC record.
|
||||
*/
|
||||
public void setBrcLeft(short[] field_4_brcLeft)
|
||||
{
|
||||
this.field_4_brcLeft = field_4_brcLeft;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the brcBottom field for the TC record.
|
||||
*/
|
||||
public short[] getBrcBottom()
|
||||
{
|
||||
return field_5_brcBottom;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the brcBottom field for the TC record.
|
||||
*/
|
||||
public void setBrcBottom(short[] field_5_brcBottom)
|
||||
{
|
||||
this.field_5_brcBottom = field_5_brcBottom;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the brcRight field for the TC record.
|
||||
*/
|
||||
public short[] getBrcRight()
|
||||
{
|
||||
return field_6_brcRight;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the brcRight field for the TC record.
|
||||
*/
|
||||
public void setBrcRight(short[] field_6_brcRight)
|
||||
{
|
||||
this.field_6_brcRight = field_6_brcRight;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the fFirstMerged field value.
|
||||
*
|
||||
*/
|
||||
public void setFFirstMerged(boolean value)
|
||||
{
|
||||
field_1_rgf = (short)fFirstMerged.setBoolean(field_1_rgf, value);
|
||||
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return the fFirstMerged field value.
|
||||
*/
|
||||
public boolean isFFirstMerged()
|
||||
{
|
||||
return fFirstMerged.isSet(field_1_rgf);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the fMerged field value.
|
||||
*
|
||||
*/
|
||||
public void setFMerged(boolean value)
|
||||
{
|
||||
field_1_rgf = (short)fMerged.setBoolean(field_1_rgf, value);
|
||||
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return the fMerged field value.
|
||||
*/
|
||||
public boolean isFMerged()
|
||||
{
|
||||
return fMerged.isSet(field_1_rgf);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the fVertical field value.
|
||||
*
|
||||
*/
|
||||
public void setFVertical(boolean value)
|
||||
{
|
||||
field_1_rgf = (short)fVertical.setBoolean(field_1_rgf, value);
|
||||
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return the fVertical field value.
|
||||
*/
|
||||
public boolean isFVertical()
|
||||
{
|
||||
return fVertical.isSet(field_1_rgf);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the fBackward field value.
|
||||
*
|
||||
*/
|
||||
public void setFBackward(boolean value)
|
||||
{
|
||||
field_1_rgf = (short)fBackward.setBoolean(field_1_rgf, value);
|
||||
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return the fBackward field value.
|
||||
*/
|
||||
public boolean isFBackward()
|
||||
{
|
||||
return fBackward.isSet(field_1_rgf);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the fRotateFont field value.
|
||||
*
|
||||
*/
|
||||
public void setFRotateFont(boolean value)
|
||||
{
|
||||
field_1_rgf = (short)fRotateFont.setBoolean(field_1_rgf, value);
|
||||
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return the fRotateFont field value.
|
||||
*/
|
||||
public boolean isFRotateFont()
|
||||
{
|
||||
return fRotateFont.isSet(field_1_rgf);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the fVertMerge field value.
|
||||
*
|
||||
*/
|
||||
public void setFVertMerge(boolean value)
|
||||
{
|
||||
field_1_rgf = (short)fVertMerge.setBoolean(field_1_rgf, value);
|
||||
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return the fVertMerge field value.
|
||||
*/
|
||||
public boolean isFVertMerge()
|
||||
{
|
||||
return fVertMerge.isSet(field_1_rgf);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the fVertRestart field value.
|
||||
*
|
||||
*/
|
||||
public void setFVertRestart(boolean value)
|
||||
{
|
||||
field_1_rgf = (short)fVertRestart.setBoolean(field_1_rgf, value);
|
||||
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return the fVertRestart field value.
|
||||
*/
|
||||
public boolean isFVertRestart()
|
||||
{
|
||||
return fVertRestart.isSet(field_1_rgf);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the vertAlign field value.
|
||||
*
|
||||
*/
|
||||
public void setVertAlign(byte value)
|
||||
{
|
||||
field_1_rgf = (short)vertAlign.setValue(field_1_rgf, value);
|
||||
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return the vertAlign field value.
|
||||
*/
|
||||
public byte getVertAlign()
|
||||
{
|
||||
return ( byte )vertAlign.getValue(field_1_rgf);
|
||||
|
||||
}
|
||||
|
||||
|
||||
} // END OF CLASS
|
||||
|
||||
|
||||
|
||||
|
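TCAbstractType above packs the seven table-cell flags and the two-bit vertical alignment into the single rgf short through org.apache.poi.util.BitField. A short sketch of how those masks behave on their own; the demo class and sample values are invented, the masks are the ones defined above:

import org.apache.poi.util.BitField;
import org.apache.poi.util.BitFieldFactory;

// Packs and unpacks cell flags with the same masks as the removed TCAbstractType.
public final class RgfBitsDemo {
    private static final BitField F_VERTICAL = BitFieldFactory.getInstance(0x0004);
    private static final BitField F_ROTATE   = BitFieldFactory.getInstance(0x0010);
    private static final BitField VERT_ALIGN = BitFieldFactory.getInstance(0x0180);

    public static void main(String[] args) {
        short rgf = 0;
        rgf = (short) F_VERTICAL.setBoolean(rgf, true);  // fVertical bit
        rgf = (short) VERT_ALIGN.setValue(rgf, 2);       // vertAlign value in bits 7-8

        System.out.println("fVertical=" + F_VERTICAL.isSet(rgf)
                + " fRotateFont=" + F_ROTATE.isSet(rgf)
                + " vertAlign=" + VERT_ALIGN.getValue(rgf));  // true false 2
    }
}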
@ -1,836 +0,0 @@
|
||||
/* ====================================================================
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
==================================================================== */
|
||||
|
||||
package org.apache.poi.hdf.model.util;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
import org.apache.poi.hdf.model.hdftypes.PropertyNode;
|
||||
|
||||
/*
|
||||
* A B-Tree-like implementation of the java.util.Set interface. This is a modifiable set
|
||||
* and thus allows elements to be added and removed. An instance of java.util.Comparator
|
||||
* must be provided at construction else all Objects added to the set must implement
|
||||
* java.util.Comparable and must be comparable to one another. No duplicate elements
|
||||
* will be allowed in any BTreeSet in accordance with the specifications of the Set interface.
|
||||
* Any attempt to add a null element will result in an IllegalArgumentException being thrown.
|
||||
* The java.util.Iterator returned by the iterator method guarantees the elements returned
|
||||
* are in ascending order. The Iterator.remove() method is supported.
|
||||
* Comment me
|
||||
*
|
||||
* @author Ryan Ackley
|
||||
*
|
||||
*/
|
||||
@Deprecated
|
||||
public final class BTreeSet extends AbstractSet<PropertyNode>
|
||||
{
|
||||
|
||||
/*
|
||||
* Instance Variables
|
||||
*/
|
||||
public BTreeNode root;
|
||||
private Comparator<PropertyNode> comparator = null;
|
||||
private int order;
|
||||
int size = 0;
|
||||
|
||||
/*
|
||||
* Constructors
|
||||
* A no-arg constructor is supported in accordance with the specifications of the
|
||||
* java.util.Collections interface. If the order for the B-Tree is not specified
|
||||
* at construction it defaults to 32.
|
||||
*/
|
||||
|
||||
public BTreeSet()
|
||||
{
|
||||
this(6); // Default order for a BTreeSet is 32
|
||||
}
|
||||
|
||||
public BTreeSet(Collection<PropertyNode> c)
|
||||
{
|
||||
this(6); // Default order for a BTreeSet is 32
|
||||
addAll(c);
|
||||
}
|
||||
|
||||
public BTreeSet(int order)
|
||||
{
|
||||
this(order, null);
|
||||
}
|
||||
|
||||
public BTreeSet(int order, Comparator<PropertyNode> comparator)
|
||||
{
|
||||
this.order = order;
|
||||
this.comparator = comparator;
|
||||
root = new BTreeNode(null);
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* Public Methods
|
||||
*/
|
||||
public boolean add(PropertyNode x) throws IllegalArgumentException
|
||||
{
|
||||
if (x == null) throw new IllegalArgumentException();
|
||||
return root.insert(x, -1);
|
||||
}
|
||||
|
||||
public boolean contains(PropertyNode x)
|
||||
{
|
||||
return root.includes(x);
|
||||
}
|
||||
|
||||
public boolean remove(PropertyNode x)
|
||||
{
|
||||
if (x == null) return false;
|
||||
return root.delete(x, -1);
|
||||
}
|
||||
|
||||
public int size()
|
||||
{
|
||||
return size;
|
||||
}
|
||||
|
||||
public void clear()
|
||||
{
|
||||
root = new BTreeNode(null);
|
||||
size = 0;
|
||||
}
|
||||
|
||||
public java.util.Iterator<PropertyNode> iterator()
|
||||
{
|
||||
return new Iterator();
|
||||
}
|
||||
|
||||
public static List<PropertyNode> findProperties(int start, int end, BTreeSet.BTreeNode root)
|
||||
{
|
||||
List<PropertyNode> results = new ArrayList<PropertyNode>();
|
||||
BTreeSet.Entry[] entries = root.entries;
|
||||
|
||||
for(int x = 0; x < entries.length; x++)
|
||||
{
|
||||
if(entries[x] != null)
|
||||
{
|
||||
BTreeSet.BTreeNode child = entries[x].child;
|
||||
PropertyNode xNode = entries[x].element;
|
||||
if(xNode != null)
|
||||
{
|
||||
int xStart = xNode.getStart();
|
||||
int xEnd = xNode.getEnd();
|
||||
if(xStart < end)
|
||||
{
|
||||
if(xStart >= start)
|
||||
{
|
||||
if(child != null)
|
||||
{
|
||||
List<PropertyNode> beforeItems = findProperties(start, end, child);
|
||||
results.addAll(beforeItems);
|
||||
}
|
||||
results.add(xNode);
|
||||
}
|
||||
else if(start < xEnd)
|
||||
{
|
||||
results.add(xNode);
|
||||
//break;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if(child != null)
|
||||
{
|
||||
List<PropertyNode> beforeItems = findProperties(start, end, child);
|
||||
results.addAll(beforeItems);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
else if(child != null)
|
||||
{
|
||||
List<PropertyNode> afterItems = findProperties(start, end, child);
|
||||
results.addAll(afterItems);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
/*
|
||||
* Private methods
|
||||
*/
|
||||
int compare(PropertyNode x, PropertyNode y)
|
||||
{
|
||||
return (comparator == null ? x.compareTo(y) : comparator.compare(x, y));
|
||||
}
|
||||
|
||||
|
||||
|
||||
/*
|
||||
* Inner Classes
|
||||
*/
|
||||
|
||||
/*
|
||||
* Guarantees that the Objects are returned in ascending order. Due to the volatile
|
||||
* structure of a B-Tree (many splits, steals and merges can happen in a single call to remove)
|
||||
* this Iterator does not attempt to track any concurrent changes that are happening to
|
||||
* its BTreeSet. Therefore, after every call to BTreeSet.remove or BTreeSet.add a new
|
||||
* Iterator should be constructed. If no new Iterator is constructed then there is a
|
||||
* chance of receiving a NullPointerException. The Iterator.delete method is supported.
|
||||
*/
|
||||
|
||||
private class Iterator implements java.util.Iterator<PropertyNode>
|
||||
{
|
||||
private int index = 0;
|
||||
private Stack<Integer> parentIndex = new Stack<Integer>(); // Contains all parentIndicies for currentNode
|
||||
private PropertyNode lastReturned = null;
|
||||
private PropertyNode next;
|
||||
private BTreeNode currentNode;
|
||||
|
||||
Iterator()
|
||||
{
|
||||
currentNode = firstNode();
|
||||
next = nextElement();
|
||||
}
|
||||
|
||||
public boolean hasNext()
|
||||
{
|
||||
return next != null;
|
||||
}
|
||||
|
||||
public PropertyNode next()
|
||||
{
|
||||
if (next == null) throw new NoSuchElementException();
|
||||
|
||||
lastReturned = next;
|
||||
next = nextElement();
|
||||
return lastReturned;
|
||||
}
|
||||
|
||||
public void remove()
|
||||
{
|
||||
if (lastReturned == null) throw new NoSuchElementException();
|
||||
|
||||
BTreeSet.this.remove(lastReturned);
|
||||
lastReturned = null;
|
||||
}
|
||||
|
||||
private BTreeNode firstNode()
|
||||
{
|
||||
BTreeNode temp = BTreeSet.this.root;
|
||||
|
||||
while (temp.entries[0].child != null)
|
||||
{
|
||||
temp = temp.entries[0].child;
|
||||
parentIndex.push(Integer.valueOf(0));
|
||||
}
|
||||
|
||||
return temp;
|
||||
}
|
||||
|
||||
private PropertyNode nextElement()
|
||||
{
|
||||
if (currentNode.isLeaf())
|
||||
{
|
||||
if (index < currentNode.nrElements) return currentNode.entries[index++].element;
|
||||
|
||||
else if (!parentIndex.empty())
|
||||
{ //All elements have been returned, return successor of lastReturned if it exists
|
||||
currentNode = currentNode.parent;
|
||||
index = parentIndex.pop().intValue();
|
||||
|
||||
while (index == currentNode.nrElements)
|
||||
{
|
||||
if (parentIndex.empty()) break;
|
||||
currentNode = currentNode.parent;
|
||||
index = parentIndex.pop().intValue();
|
||||
}
|
||||
|
||||
if (index == currentNode.nrElements) return null; //Reached root and he has no more children
|
||||
return currentNode.entries[index++].element;
|
||||
}
|
||||
|
||||
else
|
||||
{ // You're a leaf and the root
|
||||
if (index == currentNode.nrElements) return null;
|
||||
return currentNode.entries[index++].element;
|
||||
}
|
||||
}
|
||||
|
||||
// else - You're not a leaf so simply find and return the successor of lastReturned
|
||||
currentNode = currentNode.entries[index].child;
|
||||
parentIndex.push(Integer.valueOf(index));
|
||||
|
||||
while (currentNode.entries[0].child != null)
|
||||
{
|
||||
currentNode = currentNode.entries[0].child;
|
||||
parentIndex.push(Integer.valueOf(0));
|
||||
}
|
||||
|
||||
index = 1;
|
||||
return currentNode.entries[0].element;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static class Entry
|
||||
{
|
||||
|
||||
public PropertyNode element;
|
||||
public BTreeNode child;
|
||||
}
|
||||
|
||||
|
||||
public class BTreeNode
|
||||
{
|
||||
|
||||
public Entry[] entries;
|
||||
public BTreeNode parent;
|
||||
private int nrElements = 0;
|
||||
private final int MIN = (BTreeSet.this.order - 1) / 2;
|
||||
|
||||
BTreeNode(BTreeNode parent)
|
||||
{
|
||||
this.parent = parent;
|
||||
entries = new Entry[BTreeSet.this.order];
|
||||
entries[0] = new Entry();
|
||||
}
|
||||
|
||||
boolean insert(PropertyNode x, int parentIndex)
|
||||
{
|
||||
if (isFull())
|
||||
{ // If full, you must split and promote splitNode before inserting
|
||||
PropertyNode splitNode = entries[nrElements / 2].element;
|
||||
BTreeNode rightSibling = split();
|
||||
|
||||
if (isRoot())
|
||||
{ // Grow a level
|
||||
splitRoot(splitNode, this, rightSibling);
|
||||
// Determine where to insert
|
||||
if (BTreeSet.this.compare(x, BTreeSet.this.root.entries[0].element) < 0) insert(x, 0);
|
||||
else rightSibling.insert(x, 1);
|
||||
}
|
||||
|
||||
else
|
||||
{ // Promote splitNode
|
||||
parent.insertSplitNode(splitNode, this, rightSibling, parentIndex);
|
||||
if (BTreeSet.this.compare(x, parent.entries[parentIndex].element) < 0) {
|
||||
return insert(x, parentIndex);
|
||||
}
|
||||
return rightSibling.insert(x, parentIndex + 1);
|
||||
}
|
||||
}
|
||||
|
||||
else if (isLeaf())
|
||||
{ // If leaf, simply insert the non-duplicate element
|
||||
int insertAt = childToInsertAt(x, true);
|
||||
// Determine if the element already exists
|
||||
if (insertAt == -1) {
|
||||
return false;
|
||||
}
|
||||
insertNewElement(x, insertAt);
|
||||
BTreeSet.this.size++;
|
||||
return true;
|
||||
}
|
||||
|
||||
else
|
||||
{ // If not full and not leaf recursively find correct node to insert at
|
||||
int insertAt = childToInsertAt(x, true);
|
||||
return (insertAt == -1 ? false : entries[insertAt].child.insert(x, insertAt));
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
boolean includes(PropertyNode x)
|
||||
{
|
||||
int index = childToInsertAt(x, true);
|
||||
if (index == -1) return true;
|
||||
if (entries[index] == null || entries[index].child == null) return false;
|
||||
return entries[index].child.includes(x);
|
||||
}
|
||||
|
||||
boolean delete(PropertyNode x, int parentIndex)
|
||||
{
|
||||
int i = childToInsertAt(x, true);
|
||||
int priorParentIndex = parentIndex;
|
||||
BTreeNode temp = this;
|
||||
if (i != -1)
|
||||
{
|
||||
do
|
||||
{
|
||||
if (temp.entries[i] == null || temp.entries[i].child == null) return false;
|
||||
temp = temp.entries[i].child;
|
||||
priorParentIndex = parentIndex;
|
||||
parentIndex = i;
|
||||
i = temp.childToInsertAt(x, true);
|
||||
} while (i != -1);
|
||||
} // Now temp contains element to delete and temp's parentIndex is parentIndex
|
||||
|
||||
if (temp.isLeaf())
|
||||
{ // If leaf and have more than MIN elements, simply delete
|
||||
if (temp.nrElements > MIN)
|
||||
{
|
||||
temp.deleteElement(x);
|
||||
BTreeSet.this.size--;
|
||||
return true;
|
||||
}
|
||||
|
||||
// else - If leaf and have less than MIN elements, than prepare the BTreeSet for deletion
|
||||
temp.prepareForDeletion(parentIndex);
|
||||
temp.deleteElement(x);
|
||||
BTreeSet.this.size--;
|
||||
temp.fixAfterDeletion(priorParentIndex);
|
||||
return true;
|
||||
}
|
||||
|
||||
// else - Only delete at leaf so first switch with successor than delete
|
||||
temp.switchWithSuccessor(x);
|
||||
parentIndex = temp.childToInsertAt(x, false) + 1;
|
||||
return temp.entries[parentIndex].child.delete(x, parentIndex);
|
||||
|
||||
}
|
||||
|
||||
|
||||
private boolean isFull() { return nrElements == (BTreeSet.this.order - 1); }
|
||||
|
||||
private boolean isLeaf() { return entries[0].child == null; }
|
||||
|
||||
private boolean isRoot() { return parent == null; }
|
||||
|
||||
/*
|
||||
* Splits a BTreeNode into two BTreeNodes, removing the splitNode from the
|
||||
* calling BTreeNode.
|
||||
*/
|
||||
private BTreeNode split()
|
||||
{
|
||||
BTreeNode rightSibling = new BTreeNode(parent);
|
||||
int index = nrElements / 2;
|
||||
entries[index++].element = null;
|
||||
|
||||
for (int i = 0, nr = nrElements; index <= nr; i++, index++)
|
||||
{
|
||||
rightSibling.entries[i] = entries[index];
|
||||
if (rightSibling.entries[i] != null && rightSibling.entries[i].child != null)
|
||||
rightSibling.entries[i].child.parent = rightSibling;
|
||||
entries[index] = null;
|
||||
nrElements--;
|
||||
rightSibling.nrElements++;
|
||||
}
|
||||
|
||||
rightSibling.nrElements--; // Need to correct for copying the last Entry which has a null element and a child
|
||||
return rightSibling;
|
||||
}
|
||||
|
||||
/*
|
||||
* Creates a new BTreeSet.root which contains only the splitNode and pointers
|
||||
* to it's left and right child.
|
||||
*/
|
||||
private void splitRoot(PropertyNode splitNode, BTreeNode left, BTreeNode right)
|
||||
{
|
||||
BTreeNode newRoot = new BTreeNode(null);
|
||||
newRoot.entries[0].element = splitNode;
|
||||
newRoot.entries[0].child = left;
|
||||
newRoot.entries[1] = new Entry();
|
||||
newRoot.entries[1].child = right;
|
||||
newRoot.nrElements = 1;
|
||||
left.parent = right.parent = newRoot;
|
||||
BTreeSet.this.root = newRoot;
|
||||
}
|
||||
|
||||
private void insertSplitNode(PropertyNode splitNode, BTreeNode left, BTreeNode right, int insertAt)
|
||||
{
|
||||
for (int i = nrElements; i >= insertAt; i--) entries[i + 1] = entries[i];
|
||||
|
||||
entries[insertAt] = new Entry();
|
||||
entries[insertAt].element = splitNode;
|
||||
entries[insertAt].child = left;
|
||||
entries[insertAt + 1].child = right;
|
||||
|
||||
nrElements++;
|
||||
}
|
||||
|
||||
private void insertNewElement(PropertyNode x, int insertAt)
|
||||
{
|
||||
|
||||
for (int i = nrElements; i > insertAt; i--) entries[i] = entries[i - 1];
|
||||
|
||||
entries[insertAt] = new Entry();
|
||||
entries[insertAt].element = x;
|
||||
|
||||
nrElements++;
|
||||
}
|
||||
|
||||
/*
|
||||
* Possibly a deceptive name for a pretty cool method. Uses binary search
|
||||
* to determine the position in entries[] in which to traverse to find the correct
|
||||
* BTreeNode in which to insert a new element. If the element exists in the calling
|
||||
* BTreeNode than -1 is returned. When the parameter position is true and the element
|
||||
* is present in the calling BTreeNode -1 is returned, if position is false and the
|
||||
* element is contained in the calling BTreeNode than the position of the element
|
||||
* in entries[] is returned.
|
||||
*/
|
||||
private int childToInsertAt(PropertyNode x, boolean position)
|
||||
{
|
||||
int index = nrElements / 2;
|
||||
|
||||
if (entries[index] == null || entries[index].element == null) return index;
|
||||
|
||||
int lo = 0, hi = nrElements - 1;
|
||||
while (lo <= hi)
|
||||
{
|
||||
if (BTreeSet.this.compare(x, entries[index].element) > 0)
|
||||
{
|
||||
lo = index + 1;
|
||||
index = (hi + lo) / 2;
|
||||
}
|
||||
else
|
||||
{
|
||||
hi = index - 1;
|
||||
index = (hi + lo) / 2;
|
||||
}
|
||||
}
|
||||
|
||||
hi++;
|
||||
if (entries[hi] == null || entries[hi].element == null) return hi;
|
||||
return (!position ? hi : BTreeSet.this.compare(x, entries[hi].element) == 0 ? -1 : hi);
|
||||
}
|
||||
|
||||
|
||||
private void deleteElement(PropertyNode x)
|
||||
{
|
||||
int index = childToInsertAt(x, false);
|
||||
for (; index < (nrElements - 1); index++) entries[index] = entries[index + 1];
|
||||
|
||||
if (nrElements == 1) entries[index] = new Entry(); // This is root and it is empty
|
||||
else entries[index] = null;
|
||||
|
||||
nrElements--;
|
||||
}
|
||||
|
||||
private void prepareForDeletion(int parentIndex)
|
||||
{
|
||||
if (isRoot()) return; // Don't attempt to steal or merge if you're the root
|
||||
|
||||
// If not root then try to steal left
|
||||
else if (parentIndex != 0 && parent.entries[parentIndex - 1].child.nrElements > MIN)
|
||||
{
|
||||
stealLeft(parentIndex);
|
||||
return;
|
||||
}
|
||||
|
||||
// If not root and can't steal left try to steal right
|
||||
else if (parentIndex < entries.length && parent.entries[parentIndex + 1] != null && parent.entries[parentIndex + 1].child != null && parent.entries[parentIndex + 1].child.nrElements > MIN)
|
||||
{
|
||||
stealRight(parentIndex);
|
||||
return;
|
||||
}
|
||||
|
||||
// If not root and can't steal left or right then try to merge left
|
||||
else if (parentIndex != 0) {
|
||||
mergeLeft(parentIndex);
|
||||
return;
|
||||
}
|
||||
|
||||
// If not root and can't steal left or right and can't merge left you must be able to merge right
|
||||
else mergeRight(parentIndex);
|
||||
}
|
||||
|
||||
private void fixAfterDeletion(int parentIndex)
|
||||
{
|
||||
if (isRoot() || parent.isRoot()) return; // No fixing needed
|
||||
|
||||
if (parent.nrElements < MIN)
|
||||
{ // If parent lost it's n/2 element repair it
|
||||
BTreeNode temp = parent;
|
||||
temp.prepareForDeletion(parentIndex);
|
||||
if (temp.parent == null) return; // Root changed
|
||||
if (!temp.parent.isRoot() && temp.parent.nrElements < MIN)
|
||||
{ // If need be recurse
|
||||
BTreeNode x = temp.parent.parent;
|
||||
int i = 0;
|
||||
// Find parent's parentIndex
|
||||
for (; i < entries.length; i++) if (x.entries[i].child == temp.parent) break;
|
||||
temp.parent.fixAfterDeletion(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void switchWithSuccessor(PropertyNode x)
|
||||
{
|
||||
int index = childToInsertAt(x, false);
|
||||
BTreeNode temp = entries[index + 1].child;
|
||||
while (temp.entries[0] != null && temp.entries[0].child != null) temp = temp.entries[0].child;
|
||||
PropertyNode successor = temp.entries[0].element;
|
||||
temp.entries[0].element = entries[index].element;
|
||||
entries[index].element = successor;
|
||||
}
|
||||
|
||||
/*
|
||||
* This method is called only when the BTreeNode has the minimum number of elements,
|
||||
* has a leftSibling, and the leftSibling has more than the minimum number of elements.
|
||||
*/
|
||||
private void stealLeft(int parentIndex)
|
||||
{
|
||||
BTreeNode p = parent;
|
||||
BTreeNode ls = parent.entries[parentIndex - 1].child;
|
||||
|
||||
if (isLeaf())
|
||||
{ // When stealing from leaf to leaf don't worry about children
|
||||
int add = childToInsertAt(p.entries[parentIndex - 1].element, true);
|
||||
insertNewElement(p.entries[parentIndex - 1].element, add);
|
||||
p.entries[parentIndex - 1].element = ls.entries[ls.nrElements - 1].element;
|
||||
ls.entries[ls.nrElements - 1] = null;
|
||||
ls.nrElements--;
|
||||
}
|
||||
|
||||
else
|
||||
{ // Was called recursively to fix an undermanned parent
|
||||
entries[0].element = p.entries[parentIndex - 1].element;
|
||||
p.entries[parentIndex - 1].element = ls.entries[ls.nrElements - 1].element;
|
||||
entries[0].child = ls.entries[ls.nrElements].child;
|
||||
entries[0].child.parent = this;
|
||||
ls.entries[ls.nrElements] = null;
|
||||
ls.entries[ls.nrElements - 1].element = null;
|
||||
nrElements++;
|
||||
ls.nrElements--;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* This method is called only when stealLeft can't be called, the BTreeNode
|
||||
* has the minimum number of elements, has a rightSibling, and the rightSibling
|
||||
* has more than the minimum number of elements.
|
||||
*/
|
||||
private void stealRight(int parentIndex)
|
||||
{
|
||||
BTreeNode p = parent;
|
||||
BTreeNode rs = p.entries[parentIndex + 1].child;
|
||||
|
||||
if (isLeaf())
|
||||
{ // When stealing from leaf to leaf don't worry about children
|
||||
entries[nrElements] = new Entry();
|
||||
entries[nrElements].element = p.entries[parentIndex].element;
|
||||
p.entries[parentIndex].element = rs.entries[0].element;
|
||||
for (int i = 0; i < rs.nrElements; i++) rs.entries[i] = rs.entries[i + 1];
|
||||
rs.entries[rs.nrElements - 1] = null;
|
||||
nrElements++;
|
||||
rs.nrElements--;
|
||||
}
|
||||
|
||||
else
|
||||
{ // Was called recursively to fix an undermanned parent
|
||||
for (int i = 0; i <= nrElements; i++) entries[i] = entries[i + 1];
|
||||
entries[nrElements].element = p.entries[parentIndex].element;
|
||||
p.entries[parentIndex].element = rs.entries[0].element;
|
||||
entries[nrElements + 1] = new Entry();
|
||||
entries[nrElements + 1].child = rs.entries[0].child;
|
||||
entries[nrElements + 1].child.parent = this;
|
||||
for (int i = 0; i <= rs.nrElements; i++) rs.entries[i] = rs.entries[i + 1];
|
||||
rs.entries[rs.nrElements] = null;
|
||||
nrElements++;
|
||||
rs.nrElements--;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* This method is called only when stealLeft and stealRight could not be called,
|
||||
* the BTreeNode has the minimum number of elements, has a leftSibling, and the
|
||||
* leftSibling has more than the minimum number of elements. If after completion
|
||||
* parent has fewer than the minimum number of elements than the parents entries[0]
|
||||
* slot is left empty in anticipation of a recursive call to stealLeft, stealRight,
|
||||
* mergeLeft, or mergeRight to fix the parent. All of the before-mentioned methods
|
||||
* expect the parent to be in such a condition.
|
||||
*/
|
||||
private void mergeLeft(int parentIndex)
|
||||
{
|
||||
BTreeNode p = parent;
|
||||
BTreeNode ls = p.entries[parentIndex - 1].child;
|
||||
|
||||
if (isLeaf())
|
||||
{ // Don't worry about children
|
||||
int add = childToInsertAt(p.entries[parentIndex - 1].element, true);
|
||||
insertNewElement(p.entries[parentIndex - 1].element, add); // Could have been a successor switch
|
||||
p.entries[parentIndex - 1].element = null;
|
||||
|
||||
for (int i = nrElements - 1, nr = ls.nrElements; i >= 0; i--)
|
||||
entries[i + nr] = entries[i];
|
||||
|
||||
for (int i = ls.nrElements - 1; i >= 0; i--)
|
||||
{
|
||||
entries[i] = ls.entries[i];
|
||||
nrElements++;
|
||||
}
|
||||
|
||||
if (p.nrElements == MIN && p != BTreeSet.this.root)
|
||||
{
|
||||
|
||||
for (int x = parentIndex - 1, y = parentIndex - 2; y >= 0; x--, y--)
|
||||
p.entries[x] = p.entries[y];
|
||||
p.entries[0] = new Entry();
|
||||
p.entries[0].child = ls; //So p doesn't think it's a leaf this will be deleted in the next recursive call
|
||||
}
|
||||
|
||||
else
|
||||
{
|
||||
|
||||
for (int x = parentIndex - 1, y = parentIndex; y <= p.nrElements; x++, y++)
|
||||
p.entries[x] = p.entries[y];
|
||||
p.entries[p.nrElements] = null;
|
||||
}
|
||||
|
||||
p.nrElements--;
|
||||
|
||||
if (p.isRoot() && p.nrElements == 0)
|
||||
{ // It's the root and it's empty
|
||||
BTreeSet.this.root = this;
|
||||
parent = null;
|
||||
}
|
||||
}
|
||||
|
||||
else
|
||||
{ // I'm not a leaf but fixing the tree structure
|
||||
entries[0].element = p.entries[parentIndex - 1].element;
|
||||
entries[0].child = ls.entries[ls.nrElements].child;
|
||||
nrElements++;
|
||||
|
||||
for (int x = nrElements, nr = ls.nrElements; x >= 0; x--)
|
||||
entries[x + nr] = entries[x];
|
||||
|
||||
for (int x = ls.nrElements - 1; x >= 0; x--)
|
||||
{
|
||||
entries[x] = ls.entries[x];
|
||||
entries[x].child.parent = this;
|
||||
nrElements++;
|
||||
}
|
||||
|
||||
if (p.nrElements == MIN && p != BTreeSet.this.root)
|
||||
{ // Push everything to the right
|
||||
for (int x = parentIndex - 1, y = parentIndex - 2; y >= 0; x++, y++)
|
||||
{
|
||||
System.out.println(x + " " + y);
|
||||
p.entries[x] = p.entries[y];
|
||||
}
|
||||
p.entries[0] = new Entry();
|
||||
}
|
||||
|
||||
else
|
||||
{ // Either p.nrElements > MIN or p == BTreeSet.this.root so push everything to the left
|
||||
for (int x = parentIndex - 1, y = parentIndex; y <= p.nrElements; x++, y++)
|
||||
p.entries[x] = p.entries[y];
|
||||
p.entries[p.nrElements] = null;
|
||||
}
|
||||
|
||||
p.nrElements--;
|
||||
|
||||
if (p.isRoot() && p.nrElements == 0)
|
||||
{ // p == BTreeSet.this.root and it's empty
|
||||
BTreeSet.this.root = this;
|
||||
parent = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* This method is called only when stealLeft, stealRight, and mergeLeft could not be called,
|
||||
* the BTreeNode has the minimum number of elements, has a rightSibling, and the
|
||||
* rightSibling has more than the minimum number of elements. If after completion
|
||||
* parent has fewer than the minimum number of elements than the parents entries[0]
|
||||
* slot is left empty in anticipation of a recursive call to stealLeft, stealRight,
|
||||
* mergeLeft, or mergeRight to fix the parent. All of the before-mentioned methods
|
||||
* expect the parent to be in such a condition.
|
||||
*/
|
||||
private void mergeRight(int parentIndex)
|
||||
{
|
||||
BTreeNode p = parent;
|
||||
BTreeNode rs = p.entries[parentIndex + 1].child;
|
||||
|
||||
if (isLeaf())
|
||||
{ // Don't worry about children
|
||||
entries[nrElements] = new Entry();
|
||||
entries[nrElements].element = p.entries[parentIndex].element;
|
||||
nrElements++;
|
||||
for (int i = 0, nr = nrElements; i < rs.nrElements; i++, nr++)
|
||||
{
|
||||
entries[nr] = rs.entries[i];
|
||||
nrElements++;
|
||||
}
|
||||
p.entries[parentIndex].element = p.entries[parentIndex + 1].element;
|
||||
if (p.nrElements == MIN && p != BTreeSet.this.root)
|
||||
{
|
||||
for (int x = parentIndex + 1, y = parentIndex; y >= 0; x--, y--)
|
||||
p.entries[x] = p.entries[y];
|
||||
p.entries[0] = new Entry();
|
||||
p.entries[0].child = rs; // So it doesn't think it's a leaf, this child will be deleted in the next recursive call
|
||||
}
|
||||
|
||||
else
|
||||
{
|
||||
for (int x = parentIndex + 1, y = parentIndex + 2; y <= p.nrElements; x++, y++)
|
||||
p.entries[x] = p.entries[y];
|
||||
p.entries[p.nrElements] = null;
|
||||
}
|
||||
|
||||
p.nrElements--;
|
||||
if (p.isRoot() && p.nrElements == 0)
|
||||
{ // It's the root and it's empty
|
||||
BTreeSet.this.root = this;
|
||||
parent = null;
|
||||
}
|
||||
}
|
||||
|
||||
else
|
||||
{ // It's not a leaf
|
||||
|
||||
entries[nrElements].element = p.entries[parentIndex].element;
|
||||
nrElements++;
|
||||
|
||||
for (int x = nrElements + 1, y = 0; y <= rs.nrElements; x++, y++)
|
||||
{
|
||||
entries[x] = rs.entries[y];
|
||||
rs.entries[y].child.parent = this;
|
||||
nrElements++;
|
||||
}
|
||||
nrElements--;
|
||||
|
||||
p.entries[++parentIndex].child = this;
|
||||
|
||||
if (p.nrElements == MIN && p != BTreeSet.this.root)
|
||||
{
|
||||
for (int x = parentIndex - 1, y = parentIndex - 2; y >= 0; x--, y--)
|
||||
p.entries[x] = p.entries[y];
|
||||
p.entries[0] = new Entry();
|
||||
}
|
||||
|
||||
else
|
||||
{
|
||||
for (int x = parentIndex - 1, y = parentIndex; y <= p.nrElements; x++, y++)
|
||||
p.entries[x] = p.entries[y];
|
||||
p.entries[p.nrElements] = null;
|
||||
}
|
||||
|
||||
p.nrElements--;
|
||||
|
||||
if (p.isRoot() && p.nrElements == 0)
|
||||
{ // It's the root and it's empty
|
||||
BTreeSet.this.root = this;
|
||||
parent = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
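The class comment above promises ascending iteration and requires either a Comparator or mutually Comparable elements. A hedged usage sketch that pairs BTreeSet with the removed TextPiece class and orders nodes by start offset; the demo class and sample offsets are invented, and it assumes PropertyNode.getStart()/getEnd() are callable by outside code, as findProperties() above implies:

import java.util.Comparator;
import java.util.List;

import org.apache.poi.hdf.model.hdftypes.PropertyNode;
import org.apache.poi.hdf.model.hdftypes.TextPiece;
import org.apache.poi.hdf.model.util.BTreeSet;

// Builds a BTreeSet of TextPiece nodes keyed on start offset and queries a range.
public final class BTreeSetDemo {
    public static void main(String[] args) {
        Comparator<PropertyNode> byStart =
                (a, b) -> Integer.compare(a.getStart(), b.getStart());

        BTreeSet pieces = new BTreeSet(6, byStart);
        pieces.add(new TextPiece(0, 100, true));    // unicode text starting at offset 0
        pieces.add(new TextPiece(100, 50, false));  // 8-bit text starting at offset 100
        pieces.add(new TextPiece(150, 200, true));

        // Every piece overlapping the character range [80, 160)
        List<PropertyNode> hits = BTreeSet.findProperties(80, 160, pieces.root);
        for (PropertyNode node : hits) {
            System.out.println(node.getStart() + ".." + node.getEnd());
        }
    }
}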
@ -1,83 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.hdf.model.util;

import java.util.Locale;

/**
* TODO Comment me
*/
@Deprecated
public final class NumberFormatter
{
private final static int ARABIC = 0;
private final static int UPPER_ROMAN = 1;
private final static int LOWER_ROMAN = 2;
private final static int UPPER_LETTER = 3;
private final static int LOWER_LETTER = 4;
private final static int ORDINAL = 5;

private static String[] _arabic = new String[] {"1", "2", "3", "4", "5", "6",
"7", "8", "9", "10", "11", "12",
"13", "14", "15", "16", "17", "18",
"19", "20", "21", "22", "23",
"24", "25", "26", "27", "28",
"29", "30", "31", "32", "33",
"34", "35", "36", "37", "38",
"39", "40", "41", "42", "43",
"44", "45", "46", "47", "48",
"49", "50", "51", "52", "53"};
private static String[] _roman = new String[]{"i", "ii", "iii", "iv", "v", "vi",
"vii", "viii", "ix", "x", "xi", "xii",
"xiii","xiv", "xv", "xvi", "xvii",
"xviii", "xix", "xx", "xxi", "xxii",
"xxiii", "xxiv", "xxv", "xxvi",
"xxvii", "xxviii", "xxix", "xxx",
"xxxi", "xxxii", "xxxiii", "xxxiv",
"xxxv", "xxxvi", "xxxvii", "xxxvii",
"xxxviii", "xxxix", "xl", "xli", "xlii",
"xliii", "xliv", "xlv", "xlvi", "xlvii",
"xlviii", "xlix", "l"};
private static String[] _letter = new String[]{"a", "b", "c", "d", "e", "f", "g",
"h", "i", "j", "k", "l", "m", "n",
"o", "p", "q", "r", "s", "t", "u",
"v", "x", "y", "z"};
public NumberFormatter()
{
}
public static String getNumber(int num, int style)
{
switch(style)
{
case ARABIC:
return _arabic[num - 1];
case UPPER_ROMAN:
return _roman[num-1].toUpperCase(Locale.ROOT);
case LOWER_ROMAN:
return _roman[num-1];
case UPPER_LETTER:
return _letter[num-1].toUpperCase(Locale.ROOT);
case LOWER_LETTER:
return _letter[num-1];
case ORDINAL:
return _arabic[num - 1];
default:
return _arabic[num - 1];
}
}
}

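The removed NumberFormatter mapped list numbers to arabic, roman, and letter styles through fixed lookup tables, which caps the supported range (the roman table stops at "l", i.e. 50). For context only, here is a minimal algorithmic sketch of the roman-numeral part; this code is not part of the commit and the class name is made up.

import java.util.Locale;

public final class RomanNumbers {
    // Standard subtractive roman-numeral pairs, largest first.
    private static final int[]    VALUES  = {1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1};
    private static final String[] SYMBOLS = {"m", "cm", "d", "cd", "c", "xc", "l", "xl", "x", "ix", "v", "iv", "i"};

    /** Converts a positive number to lower-case roman numerals without a lookup table. */
    public static String toLowerRoman(int num) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < VALUES.length && num > 0; i++) {
            while (num >= VALUES[i]) {
                sb.append(SYMBOLS[i]);
                num -= VALUES[i];
            }
        }
        return sb.toString();
    }

    public static String toUpperRoman(int num) {
        return toLowerRoman(num).toUpperCase(Locale.ROOT);
    }
}
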
@ -1,65 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.hdf.model.util;

import org.apache.poi.hdf.model.hdftypes.FormattedDiskPage;

@Deprecated
public final class ParsingState
{

//int _numPages;// = charPlcf.length();
int _currentPageIndex = 0;
FormattedDiskPage _fkp;// = new CHPFormattedDiskPage(fkp);
int _currentPropIndex = 0;
//int _currentArraySize;// = cfkp.size();

public ParsingState(int firstPage, FormattedDiskPage fkp)
{
_fkp = fkp;
}
//public int getCurrentPage()
//{
// return _currentPage;
//}
//public int getNumPages()
//{
// return _numPages;
//}
public int getCurrentPageIndex()
{
return _currentPageIndex;
}
public FormattedDiskPage getFkp()
{
return _fkp;
}
public int getCurrentPropIndex()
{
return _currentPropIndex;
}

public void setState(int currentPageIndex, FormattedDiskPage fkp, int currentPropIndex)
{

_currentPageIndex = currentPageIndex;
_fkp = fkp;
_currentPropIndex = currentPropIndex;
//_currentArraySize = currentArraySize;
}
}

@ -23,7 +23,6 @@ import org.apache.poi.hdgf.chunks.ChunkFactory.CommandDefinition;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;
import org.apache.poi.util.StringUtil;

/**
* Base of all chunks, which hold data, flags etc
@ -55,7 +54,7 @@ public final class Chunk {
this.header = header;
this.trailer = trailer;
this.separator = separator;
this.contents = contents;
this.contents = contents.clone();
}

public byte[] _getContents() {
@ -116,14 +115,14 @@ public final class Chunk {

// Loop over the definitions, building the commands
// and getting their values
ArrayList<Command> commands = new ArrayList<Command>();
for(int i=0; i<commandDefinitions.length; i++) {
int type = commandDefinitions[i].getType();
int offset = commandDefinitions[i].getOffset();
ArrayList<Command> commandList = new ArrayList<Command>();
for(CommandDefinition cdef : commandDefinitions) {
int type = cdef.getType();
int offset = cdef.getOffset();

// Handle virtual commands
if(type == 10) {
name = commandDefinitions[i].getName();
name = cdef.getName();
continue;
} else if(type == 18) {
continue;
@ -133,9 +132,9 @@ public final class Chunk {
// Build the appropriate command for the type
Command command;
if(type == 11 || type == 21) {
command = new BlockOffsetCommand(commandDefinitions[i]);
command = new BlockOffsetCommand(cdef);
} else {
command = new Command(commandDefinitions[i]);
command = new Command(cdef);
}

// Bizarely, many of the offsets are from the start of the
@ -234,12 +233,12 @@ public final class Chunk {
}

// Add to the array
commands.add(command);
commandList.add(command);
}

// Save the commands we liked the look of
this.commands = commands.toArray(
new Command[commands.size()] );
this.commands = commandList.toArray(
new Command[commandList.size()] );

// Now build up the blocks, if we had a command that tells
// us where a block is
@ -280,13 +279,11 @@ public final class Chunk {
* A special kind of command that holds the offset to
* a block
*/
public static class BlockOffsetCommand extends Command {
private int offset;
private static class BlockOffsetCommand extends Command {
private BlockOffsetCommand(CommandDefinition definition) {
super(definition, null);
}
private void setOffset(int offset) {
this.offset = offset;
value = Integer.valueOf(offset);
}
}

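The constructor change above (and the matching ones in MAPIAttribute and QCBit below) swaps a stored reference to the caller's byte array for a defensive copy via clone(), which is the usual fix for the Sonar "array stored directly" warning. A minimal illustrative sketch of the pattern; the class and field names here are hypothetical, not POI API.

public final class Holder {
    private final byte[] data;

    public Holder(byte[] data) {
        // Without clone(), the caller keeps a live reference to the same array
        // and can mutate this object's internal state from outside.
        this.data = data.clone();
    }

    public byte[] getData() {
        // Returning a copy keeps the internal array effectively immutable to callers.
        return data.clone();
    }
}
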
@ -49,7 +49,7 @@ public class MAPIAttribute {
public MAPIAttribute(MAPIProperty property, int type, byte[] data) {
this.property = property;
this.type = type;
this.data = data;
this.data = data.clone();
}

public MAPIProperty getProperty() {

@ -34,7 +34,7 @@ public abstract class QCBit {
public QCBit(String thingType, String bitType, byte[] data) {
this.thingType = thingType;
this.bitType = bitType;
this.data = data;
this.data = data.clone();
}

/**

@ -19,18 +19,18 @@ package org.apache.poi.hslf.dev;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.util.Locale;

import org.apache.poi.ddf.DefaultEscherRecordFactory;
import org.apache.poi.ddf.EscherContainerRecord;
import org.apache.poi.ddf.EscherRecord;
import org.apache.poi.ddf.EscherTextboxRecord;
import org.apache.poi.hslf.record.HSLFEscherRecordFactory;
import org.apache.poi.hslf.record.RecordTypes;
import org.apache.poi.poifs.filesystem.DocumentEntry;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.hslf.record.HSLFEscherRecordFactory;
import org.apache.poi.hslf.record.RecordTypes;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndian;

/**
@ -46,15 +46,15 @@ import org.apache.poi.util.LittleEndian;
* from hslf.record.RecordTypes also)
*/
public final class SlideShowDumper {
private NPOIFSFileSystem filesystem;

private byte[] _docstream;
private byte[] docstream;

/** Do we try to use DDF to understand the escher objects? */
private boolean ddfEscher = false;
/** Do we use our own built-in basic escher groker to understand the escher objects? */
private boolean basicEscher = false;

private PrintStream out;

/**
* right now this function takes one parameter: a ppt file, and outputs
* a dump of what it contains
@ -71,7 +71,9 @@ public final class SlideShowDumper {
filename = args[1];
}

SlideShowDumper foo = new SlideShowDumper(filename);
NPOIFSFileSystem poifs = new NPOIFSFileSystem(new File(filename));
SlideShowDumper foo = new SlideShowDumper(poifs, System.out);
poifs.close();

if(args.length > 1) {
if(args[0].equalsIgnoreCase("-escher")) {
@ -82,33 +84,6 @@ public final class SlideShowDumper {
}

foo.printDump();
foo.close();
}


/**
* Constructs a Powerpoint dump from fileName. Parses the document
* and dumps out the contents
*
* @param fileName The name of the file to read.
* @throws IOException if there is a problem while parsing the document.
*/
public SlideShowDumper(String fileName) throws IOException
{
this(new NPOIFSFileSystem(new File(fileName)));
}

/**
* Constructs a Powerpoint dump from an input stream. Parses the
* document and dumps out the contents
*
* @param inputStream the source of the data
* @throws IOException if there is a problem while parsing the document.
*/
public SlideShowDumper(InputStream inputStream) throws IOException
{
//do Ole stuff
this(new NPOIFSFileSystem(inputStream));
}

/**
@ -118,17 +93,15 @@ public final class SlideShowDumper {
* @param filesystem the POIFS FileSystem to read from
* @throws IOException if there is a problem while parsing the document.
*/
public SlideShowDumper(NPOIFSFileSystem filesystem) throws IOException
{
this.filesystem = filesystem;

public SlideShowDumper(NPOIFSFileSystem filesystem, PrintStream out) throws IOException {
// Get the main document stream
DocumentEntry docProps =
(DocumentEntry)filesystem.getRoot().getEntry("PowerPoint Document");

// Grab the document stream
_docstream = new byte[docProps.getSize()];
filesystem.createDocumentInputStream("PowerPoint Document").read(_docstream);
docstream = new byte[docProps.getSize()];
filesystem.createDocumentInputStream("PowerPoint Document").read(docstream);
this.out = out;
}

/**
@ -148,18 +121,7 @@ public final class SlideShowDumper {
ddfEscher = !(grok);
}

/**
* Shuts things down. Closes underlying streams etc
*
* @throws IOException
*/
public void close() throws IOException
{
filesystem.close();
}


public void printDump() {
public void printDump() throws IOException {
// The format of records in a powerpoint file are:
// <little endian 2 byte "info">
// <little endian 2 byte "type">
@ -189,75 +151,53 @@ public final class SlideShowDumper {
// 0x0f (15) and get back 0x0f, you know it has children. Otherwise
// it doesn't

walkTree(0,0,_docstream.length);
walkTree(0,0,docstream.length);
}

public String makeHex(short s) {
String hex = Integer.toHexString(s).toUpperCase(Locale.ROOT);
if(hex.length() == 1) { return "0" + hex; }
return hex;
}
public String makeHex(int i) {
String hex = Integer.toHexString(i).toUpperCase(Locale.ROOT);
if(hex.length() == 1) { return "000" + hex; }
if(hex.length() == 2) { return "00" + hex; }
if(hex.length() == 3) { return "0" + hex; }
return hex;
}

public void walkTree(int depth, int startPos, int maxLen) {
public void walkTree(int depth, int startPos, int maxLen) throws IOException {
int pos = startPos;
int endPos = startPos + maxLen;
int indent = depth;
final String ind = (depth == 0) ? "%1$s" : "%1$"+depth+"s";
while(pos <= endPos - 8) {
long type = LittleEndian.getUShort(_docstream,pos+2);
long len = LittleEndian.getUInt(_docstream,pos+4);
byte opt = _docstream[pos];
long type = LittleEndian.getUShort(docstream,pos+2);
long len = LittleEndian.getUInt(docstream,pos+4);
byte opt = docstream[pos];

String ind = "";
for(int i=0; i<indent; i++) { ind += " "; }

System.out.println(ind + "At position " + pos + " (" + makeHex(pos) + "):");
System.out.println(ind + "Type is " + type + " (" + makeHex((int)type) + "), len is " + len + " (" + makeHex((int)len) + ")");
String fmt = ind+"At position %2$d (%2$04x): type is %3$d (%3$04x), len is %4$d (%4$04x)";
out.println(String.format(Locale.ROOT, fmt, "", pos, type, len));

// See if we know about the type of it
String recordName = RecordTypes.recordName((int)type);

// Jump over header, and think about going on more
pos += 8;
if(recordName != null) {
System.out.println(ind + "That's a " + recordName);
out.println(String.format(Locale.ROOT, ind+"That's a %2$s", "", recordName));

// Now check if it's a container or not
int container = opt & 0x0f;
// Now check if it's a container or not
int container = opt & 0x0f;

// BinaryTagData seems to contain records, but it
// isn't tagged as doing so. Try stepping in anyway
if(type == 5003L && opt == 0L) {
container = 0x0f;
}

if(type == 0L || (container != 0x0f)) {
System.out.println();
} else if (type == 1035l || type == 1036l) {
// Special Handling of 1035=PPDrawingGroup and 1036=PPDrawing
System.out.println();

if(ddfEscher) {
// Seems to be:
walkEscherDDF((indent+3),pos+8,(int)len-8);
} else if(basicEscher) {
walkEscherBasic((indent+3),pos+8,(int)len-8);
}
} else {
// General container record handling code
System.out.println();
walkTree((indent+2),pos,(int)len);
}
} else {
System.out.println(ind + "** unknown record **");
System.out.println();
// BinaryTagData seems to contain records, but it
// isn't tagged as doing so. Try stepping in anyway
if(type == 5003L && opt == 0L) {
container = 0x0f;
}

out.println();
if (type != 0L && container == 0x0f) {
if (type == 1035l || type == 1036l) {
// Special Handling of 1035=PPDrawingGroup and 1036=PPDrawing
if(ddfEscher) {
// Seems to be:
walkEscherDDF((depth+3),pos+8,(int)len-8);
} else if(basicEscher) {
walkEscherBasic((depth+3),pos+8,(int)len-8);
}
} else {
// General container record handling code
walkTree((depth+2),pos,(int)len);
}
}

pos += (int)len;
}
}
@ -268,11 +208,10 @@ public void walkTree(int depth, int startPos, int maxLen) {
public void walkEscherDDF(int indent, int pos, int len) {
if(len < 8) { return; }

String ind = "";
for(int i=0; i<indent; i++) { ind += " "; }
final String ind = (indent == 0) ? "%1$s" : "%1$"+indent+"s";

byte[] contents = new byte[len];
System.arraycopy(_docstream,pos,contents,0,len);
System.arraycopy(docstream,pos,contents,0,len);
DefaultEscherRecordFactory erf = new HSLFEscherRecordFactory();
EscherRecord record = erf.createRecord(contents,0);

@ -285,22 +224,21 @@ public void walkTree(int depth, int startPos, int maxLen) {
// This (should) include the 8 byte header size
int recordLen = record.getRecordSize();

String fmt = ind+"At position %2$d (%2$04x): type is %3$d (%3$04x), len is %4$d (%4$04x) (%5$d) - record claims %6$d";
out.println(String.format(Locale.ROOT, fmt, "", pos, atomType, atomLen, atomLen+8, recordLen));

System.out.println(ind + "At position " + pos + " (" + makeHex(pos) + "):");
System.out.println(ind + "Type is " + atomType + " (" + makeHex((int)atomType) + "), len is " + atomLen + " (" + makeHex((int)atomLen) + ") (" + (atomLen+8) + ") - record claims " + recordLen);

// Check for corrupt / lying ones
if(recordLen != 8 && (recordLen != (atomLen+8))) {
System.out.println(ind + "** Atom length of " + atomLen + " (" + (atomLen+8) + ") doesn't match record length of " + recordLen);
out.println(String.format(Locale.ROOT, ind+"** Atom length of $2d ($3d) doesn't match record length of %4d", "", atomLen, atomLen+8, recordLen));
}

// Print the record's details
if(record instanceof EscherContainerRecord) {
EscherContainerRecord ecr = (EscherContainerRecord)record;
System.out.println(ind + ecr.toString());
String recordStr = record.toString().replace("\n", String.format(Locale.ROOT, "\n"+ind, ""));
out.println(String.format(Locale.ROOT, ind+"%2$s", "", recordStr));

if(record instanceof EscherContainerRecord) {
walkEscherDDF((indent+3), pos + 8, (int)atomLen );
} else {
System.out.println(ind + record.toString());
}

// Handle records that seem to lie
@ -313,7 +251,7 @@ public void walkTree(int depth, int startPos, int maxLen) {
recordLen = (int)atomLen + 8;
record.fillFields( contents, 0, erf );
if(! (record instanceof EscherTextboxRecord)) {
System.out.println(ind + "** Really a msofbtClientTextbox !");
out.println(String.format(Locale.ROOT, ind+"%2$s", "", "** Really a msofbtClientTextbox !"));
}
}

@ -344,58 +282,31 @@ public void walkTree(int depth, int startPos, int maxLen) {
/**
* Use the basic record format groking code to walk the Escher records
*/
public void walkEscherBasic(int indent, int pos, int len) {
public void walkEscherBasic(int indent, int pos, int len) throws IOException {
if(len < 8) { return; }

String ind = "";
for(int i=0; i<indent; i++) { ind += " "; }
final String ind = (indent == 0) ? "%1$s" : "%1$"+indent+"s";

long type = LittleEndian.getUShort(_docstream,pos+2);
long atomlen = LittleEndian.getUInt(_docstream,pos+4);
String typeS = makeHex((int)type);
long type = LittleEndian.getUShort(docstream,pos+2);
long atomlen = LittleEndian.getUInt(docstream,pos+4);

System.out.println(ind + "At position " + pos + " (" + makeHex(pos) + "):");
System.out.println(ind + "Type is " + type + " (" + typeS + "), len is " + atomlen + " (" + makeHex((int)atomlen) + ")");
String fmt = ind+"At position %2$d ($2$04x): type is %3$d (%3$04x), len is %4$d (%4$04x)";
out.println(String.format(Locale.ROOT, fmt, "", pos, type, atomlen));

String typeName = RecordTypes.recordName((int)type);
if(typeName != null) {
System.out.println(ind + "That's an Escher Record: " + typeName);
} else {
System.out.println(ind + "(Unknown Escher Record)");
}


// Code to print the first 8 bytes
// System.out.print(ind);
// for(int i=0; i<8; i++) {
// short bv = _docstream[i+pos];
// if(bv < 0) { bv += 256; }
// System.out.print(i + "=" + bv + " (" + makeHex(bv) + ") ");
// }
// System.out.println("");
out.println(String.format(Locale.ROOT, ind+"%2$s", "That's an Escher Record: ", typeName));

// Record specific dumps
if(type == 61453l) {
// Text Box. Print out first 8 bytes of data, then 8 4 later
System.out.print(ind);
for(int i=8; i<16; i++) {
short bv = _docstream[i+pos];
if(bv < 0) { bv += 256; }
System.out.print(i + "=" + bv + " (" + makeHex(bv) + ") ");
}
System.out.println("");
System.out.print(ind);
for(int i=20; i<28; i++) {
short bv = _docstream[i+pos];
if(bv < 0) { bv += 256; }
System.out.print(i + "=" + bv + " (" + makeHex(bv) + ") ");
}
System.out.println("");
HexDump.dump(docstream, 0, out, pos+8, 8);
HexDump.dump(docstream, 0, out, pos+20, 8);
out.println();
}


// Blank line before next entry
System.out.println("");
out.println();

// Look in children if we are a container
if(type == 61443l || type == 61444l) {

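The rewritten dumper above replaces the space-concatenation loop with a positional format specifier: "%1$Ns" pads the (empty) first argument to N characters to produce the indentation, and "%2$d" / "%2$04x" reuse the same value as decimal and zero-padded hex. A small self-contained sketch of that idiom; the numbers are made up.

import java.util.Locale;

public class IndentDemo {
    public static void main(String[] args) {
        int depth = 4, pos = 1234, type = 1036;
        // "%1$4s" left-pads the empty first argument to 'depth' spaces;
        // the later specifiers reuse pos and type as decimal and hex.
        String fmt = (depth == 0 ? "%1$s" : "%1$" + depth + "s")
                + "At position %2$d (%2$04x): type is %3$d (%3$04x)";
        System.out.println(String.format(Locale.ROOT, fmt, "", pos, type));
        // Prints: "    At position 1234 (04d2): type is 1036 (040c)"
    }
}
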
@ -233,14 +233,21 @@ public final class PowerPointExtractor extends POIOLE2TextExtractor {
}
}

for (int i = 0; i < _slides.size(); i++) {
HSLFSlide slide = _slides.get(i);
for (HSLFSlide slide : _slides) {
String headerText = "";
String footerText = "";
HeadersFooters hf = slide.getHeadersFooters();
if (hf != null) {
if (hf.isHeaderVisible()) {
headerText = safeLine(hf.getHeaderText());
}
if (hf.isFooterVisible()) {
footerText = safeLine(hf.getFooterText());
}
}

// Slide header, if set
HeadersFooters hf = slide.getHeadersFooters();
if (hf != null && hf.isHeaderVisible() && hf.getHeaderText() != null) {
ret.append(hf.getHeaderText() + "\n");
}
// Slide header, if set
ret.append(headerText);

// Slide text
textRunsToText(ret, slide.getTextParagraphs());
@ -252,9 +259,7 @@ public final class PowerPointExtractor extends POIOLE2TextExtractor {
}
}
// Slide footer, if set
if (hf != null && hf.isFooterVisible() && hf.getFooterText() != null) {
ret.append(hf.getFooterText() + "\n");
}
ret.append(footerText);

// Comments, if requested and present
if (getCommentText) {
@ -274,7 +279,18 @@ public final class PowerPointExtractor extends POIOLE2TextExtractor {
// master sheets in. Grab Slide list, then work from there,
// but ensure no duplicates
HashSet<Integer> seenNotes = new HashSet<Integer>();
String headerText = "";
String footerText = "";
HeadersFooters hf = _show.getNotesHeadersFooters();
if (hf != null) {
if (hf.isHeaderVisible()) {
headerText = safeLine(hf.getHeaderText());
}
if (hf.isFooterVisible()) {
footerText = safeLine(hf.getFooterText());
}
}


for (int i = 0; i < _slides.size(); i++) {
HSLFNotes notes = _slides.get(i).getNotes();
@ -288,23 +304,23 @@ public final class PowerPointExtractor extends POIOLE2TextExtractor {
seenNotes.add(id);

// Repeat the Notes header, if set
if (hf != null && hf.isHeaderVisible() && hf.getHeaderText() != null) {
ret.append(hf.getHeaderText() + "\n");
}
ret.append(headerText);

// Notes text
textRunsToText(ret, notes.getTextParagraphs());

// Repeat the notes footer, if set
if (hf != null && hf.isFooterVisible() && hf.getFooterText() != null) {
ret.append(hf.getFooterText() + "\n");
}
ret.append(footerText);
}
}

return ret.toString();
}

private static String safeLine(String text) {
return (text == null) ? "" : (text+'\n');
}

private void extractTableText(StringBuffer ret, HSLFTable table) {
for (int row = 0; row < table.getNumberOfRows(); row++){
for (int col = 0; col < table.getNumberOfColumns(); col++){

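The extractor change above hoists the header/footer lookup out of the per-slide loop and funnels the repeated null and visibility checks through one helper, safeLine, which yields either an empty string or the text plus a newline. A reduced, self-contained sketch of that pattern (the strings are made up; this is not the full extractor):

public class SafeLineDemo {
    static String safeLine(String text) {
        return (text == null) ? "" : (text + '\n');
    }

    public static void main(String[] args) {
        StringBuilder ret = new StringBuilder();
        String headerText = safeLine("My header"); // visible header -> "My header\n"
        String footerText = safeLine(null);        // missing footer -> ""
        ret.append(headerText).append("slide text\n").append(footerText);
        System.out.print(ret);
    }
}
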
@ -220,8 +220,7 @@ public final class RecordTypes {
*/
public static String recordName(int type) {
String name = typeToName.get(Integer.valueOf(type));
if (name == null) name = "Unknown" + type;
return name;
return (name == null) ? ("Unknown" + type) : name;
}

/**

@ -189,13 +189,14 @@ public class ExcelToFoConverter extends AbstractExcelConverter
* @return <tt>false</tt> if cell style by itself (without text, i.e.
* borders, fill, etc.) worth a mention, <tt>true</tt> otherwise
*/
protected boolean isEmptyStyle( CellStyle cellStyle )
{
return cellStyle.getFillPattern() == 0 //
&& cellStyle.getBorderTop() == HSSFCellStyle.BORDER_NONE //
&& cellStyle.getBorderRight() == HSSFCellStyle.BORDER_NONE //
&& cellStyle.getBorderBottom() == HSSFCellStyle.BORDER_NONE //
&& cellStyle.getBorderLeft() == HSSFCellStyle.BORDER_NONE; //
protected boolean isEmptyStyle( CellStyle cellStyle ) {
return cellStyle == null || (
cellStyle.getFillPattern() == 0
&& cellStyle.getBorderTop() == HSSFCellStyle.BORDER_NONE
&& cellStyle.getBorderRight() == HSSFCellStyle.BORDER_NONE
&& cellStyle.getBorderBottom() == HSSFCellStyle.BORDER_NONE
&& cellStyle.getBorderLeft() == HSSFCellStyle.BORDER_NONE
);
}

protected boolean processCell( HSSFWorkbook workbook, HSSFCell cell,
@ -226,20 +227,17 @@ public class ExcelToFoConverter extends AbstractExcelConverter
}
break;
case HSSFCell.CELL_TYPE_NUMERIC:
HSSFCellStyle style = cellStyle;
if ( style == null )
{
value = String.valueOf( cell.getNumericCellValue() );
}
else
{
value = ( _formatter.formatRawCellContents(
cell.getNumericCellValue(), style.getDataFormat(),
style.getDataFormatString() ) );
double nValue = cell.getNumericCellValue();
if ( cellStyle == null ) {
value = Double.toString( nValue );
} else {
short df = cellStyle.getDataFormat();
String dfs = cellStyle.getDataFormatString();
value = _formatter.formatRawCellContents(nValue, df, dfs );
}
break;
case HSSFCell.CELL_TYPE_BOOLEAN:
value = String.valueOf( cell.getBooleanCellValue() );
value = Boolean.toString( cell.getBooleanCellValue() );
break;
case HSSFCell.CELL_TYPE_ERROR:
value = ErrorEval.getText( cell.getErrorCellValue() );
@ -260,7 +258,7 @@ public class ExcelToFoConverter extends AbstractExcelConverter
value = _formatter.formatCellValue( cell );
break;
case HSSFCell.CELL_TYPE_BOOLEAN:
value = String.valueOf( cell.getBooleanCellValue() );
value = Boolean.toString( cell.getBooleanCellValue() );
break;
case HSSFCell.CELL_TYPE_ERROR:
value = ErrorEval.getText( cell.getErrorCellValue() );
@ -272,20 +270,16 @@ public class ExcelToFoConverter extends AbstractExcelConverter
}

final boolean noText = ExcelToHtmlUtils.isEmpty( value );
final boolean wrapInDivs = !noText && !cellStyle.getWrapText();
final boolean wrapInDivs = !noText && (cellStyle == null || !cellStyle.getWrapText());

final boolean emptyStyle = isEmptyStyle( cellStyle );
if ( !emptyStyle )
{
if ( noText )
{
/*
* if cell style is defined (like borders, etc.) but cell text
* is empty, add " " to output, so browser won't collapse
* and ignore cell
*/
value = "\u00A0";
}
if ( !emptyStyle && noText ) {
/*
* if cell style is defined (like borders, etc.) but cell text
* is empty, add " " to output, so browser won't collapse
* and ignore cell
*/
value = "\u00A0";
}

if ( isOutputLeadingSpacesAsNonBreaking() && value.startsWith( " " ) )
@ -293,13 +287,15 @@ public class ExcelToFoConverter extends AbstractExcelConverter
StringBuilder builder = new StringBuilder();
for ( int c = 0; c < value.length(); c++ )
{
if ( value.charAt( c ) != ' ' )
if ( value.charAt( c ) != ' ' ) {
break;
}
builder.append( '\u00a0' );
}

if ( value.length() != builder.length() )
if ( value.length() != builder.length() ) {
builder.append( value.substring( builder.length() ) );
}

value = builder.toString();
}

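The numeric-cell branch above now formats through formatRawCellContents(double, int, String) only when a cell style is available, falling back to Double.toString otherwise. A hedged usage sketch with POI's DataFormatter showing the same null-safe shape; the file name and cell position are made up, and in practice HSSF cells normally always report a style.

import java.io.FileInputStream;
import java.io.IOException;

import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.DataFormatter;

public class NumericCellDemo {
    public static void main(String[] args) throws IOException {
        try (HSSFWorkbook wb = new HSSFWorkbook(new FileInputStream("input.xls"))) {
            Cell cell = wb.getSheetAt(0).getRow(0).getCell(0);
            CellStyle style = cell.getCellStyle();
            DataFormatter formatter = new DataFormatter();
            double n = cell.getNumericCellValue();
            // Apply the cell's own data format when present, else a plain double.
            String value = (style == null)
                    ? Double.toString(n)
                    : formatter.formatRawCellContents(n, style.getDataFormat(), style.getDataFormatString());
            System.out.println(value);
        }
    }
}
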
@ -155,25 +155,21 @@ public class ExcelToHtmlConverter extends AbstractExcelConverter
style.append( "white-space:pre-wrap;" );
ExcelToHtmlUtils.appendAlign( style, cellStyle.getAlignment() );

if ( cellStyle.getFillPattern() == 0 )
{
switch (cellStyle.getFillPattern()) {
// no fill
}
else if ( cellStyle.getFillPattern() == 1 )
{
final HSSFColor foregroundColor = cellStyle
.getFillForegroundColorColor();
if ( foregroundColor != null )
style.append( "background-color:"
+ ExcelToHtmlUtils.getColor( foregroundColor ) + ";" );
}
else
{
final HSSFColor backgroundColor = cellStyle
.getFillBackgroundColorColor();
if ( backgroundColor != null )
style.append( "background-color:"
+ ExcelToHtmlUtils.getColor( backgroundColor ) + ";" );
case 0: break;
case 1:
final HSSFColor foregroundColor = cellStyle.getFillForegroundColorColor();
if ( foregroundColor == null ) break;
String fgCol = ExcelToHtmlUtils.getColor( foregroundColor );
style.append( "background-color:" + fgCol + ";" );
break;
default:
final HSSFColor backgroundColor = cellStyle.getFillBackgroundColorColor();
if ( backgroundColor == null ) break;
String bgCol = ExcelToHtmlUtils.getColor( backgroundColor );
style.append( "background-color:" + bgCol + ";" );
break;
}

buildStyle_border( workbook, style, "top", cellStyle.getBorderTop(),
@ -194,8 +190,9 @@ public class ExcelToHtmlConverter extends AbstractExcelConverter
private void buildStyle_border( HSSFWorkbook workbook, StringBuilder style,
String type, short xlsBorder, short borderColor )
{
if ( xlsBorder == HSSFCellStyle.BORDER_NONE )
if ( xlsBorder == HSSFCellStyle.BORDER_NONE ) {
return;
}

StringBuilder borderStyle = new StringBuilder();
borderStyle.append( ExcelToHtmlUtils.getBorderWidth( xlsBorder ) );
@ -315,16 +312,13 @@ public class ExcelToHtmlConverter extends AbstractExcelConverter
}
break;
case HSSFCell.CELL_TYPE_NUMERIC:
HSSFCellStyle style = cellStyle;
if ( style == null )
{
value = String.valueOf( cell.getNumericCellValue() );
}
else
{
value = ( _formatter.formatRawCellContents(
cell.getNumericCellValue(), style.getDataFormat(),
style.getDataFormatString() ) );
double nValue = cell.getNumericCellValue();
if ( cellStyle == null ) {
value = Double.toString(nValue);
} else {
short df = cellStyle.getDataFormat();
String dfs = cellStyle.getDataFormatString();
value = _formatter.formatRawCellContents(nValue, df, dfs);
}
break;
case HSSFCell.CELL_TYPE_BOOLEAN:
@ -362,27 +356,22 @@ public class ExcelToHtmlConverter extends AbstractExcelConverter

final boolean noText = ExcelToHtmlUtils.isEmpty( value );
final boolean wrapInDivs = !noText && isUseDivsToSpan()
&& !cellStyle.getWrapText();
&& (cellStyle == null || !cellStyle.getWrapText());

final short cellStyleIndex = cellStyle.getIndex();
if ( cellStyleIndex != 0 )
if ( cellStyle != null && cellStyle.getIndex() != 0 )
{
@SuppressWarnings("resource")
HSSFWorkbook workbook = cell.getRow().getSheet().getWorkbook();
String mainCssClass = getStyleClassName( workbook, cellStyle );

if ( wrapInDivs )
{
if ( wrapInDivs ) {
tableCellElement.setAttribute( "class", mainCssClass + " "
+ cssClassContainerCell );
}
else
{
} else {
tableCellElement.setAttribute( "class", mainCssClass );
}

if ( noText )
{
if ( noText ) {
/*
* if cell style is defined (like borders, etc.) but cell text
* is empty, add " " to output, so browser won't collapse
@ -429,8 +418,9 @@ public class ExcelToHtmlConverter extends AbstractExcelConverter
innerDivStyle.append( "overflow:hidden;max-height:" );
innerDivStyle.append( normalHeightPt );
innerDivStyle.append( "pt;white-space:nowrap;" );
ExcelToHtmlUtils.appendAlign( innerDivStyle,
cellStyle.getAlignment() );
if (cellStyle != null) {
ExcelToHtmlUtils.appendAlign( innerDivStyle, cellStyle.getAlignment() );
}
htmlDocumentFacade.addStyleClass( outerDiv, cssClassPrefixDiv,
innerDivStyle.toString() );

@ -443,7 +433,7 @@ public class ExcelToHtmlConverter extends AbstractExcelConverter
tableCellElement.appendChild( text );
}

return ExcelToHtmlUtils.isEmpty( value ) && cellStyleIndex == 0;
return ExcelToHtmlUtils.isEmpty( value ) && (cellStyle == null || cellStyle.getIndex() == 0);
}

protected void processColumnHeaders( HSSFSheet sheet, int maxSheetColumns,

@ -20,7 +20,6 @@ package org.apache.poi.hwpf.model.types;

import java.util.Arrays;

import org.apache.poi.hdf.model.hdftypes.HDFType;
import org.apache.poi.util.BitField;
import org.apache.poi.util.Internal;
import org.apache.poi.util.LittleEndian;
@ -33,61 +32,61 @@ import org.apache.poi.util.LittleEndian;
* @author S. Ryan Ackley
*/
@Internal
public abstract class DOPAbstractType implements HDFType {
public abstract class DOPAbstractType {

protected byte field_1_formatFlags;
/**/private static BitField fFacingPages = new BitField(0x01);
/**/private static BitField fWidowControl = new BitField(0x02);
/**/private static BitField fPMHMainDoc = new BitField(0x04);
/**/private static BitField grfSupression = new BitField(0x18);
/**/private static BitField fpc = new BitField(0x60);
/**/private static BitField unused1 = new BitField(0x80);
private static final BitField fFacingPages = new BitField(0x01);
private static final BitField fWidowControl = new BitField(0x02);
private static final BitField fPMHMainDoc = new BitField(0x04);
private static final BitField grfSupression = new BitField(0x18);
private static final BitField fpc = new BitField(0x60);
private static final BitField unused1 = new BitField(0x80);
protected byte field_2_unused2;
protected short field_3_footnoteInfo;
/**/private static BitField rncFtn = new BitField(0x0003);
/**/private static BitField nFtn = new BitField(0xfffc);
private static final BitField rncFtn = new BitField(0x0003);
private static final BitField nFtn = new BitField(0xfffc);
protected byte field_4_fOutlineDirtySave;
protected byte field_5_docinfo;
/**/private static BitField fOnlyMacPics = new BitField(0x01);
/**/private static BitField fOnlyWinPics = new BitField(0x02);
/**/private static BitField fLabelDoc = new BitField(0x04);
/**/private static BitField fHyphCapitals = new BitField(0x08);
/**/private static BitField fAutoHyphen = new BitField(0x10);
/**/private static BitField fFormNoFields = new BitField(0x20);
/**/private static BitField fLinkStyles = new BitField(0x40);
/**/private static BitField fRevMarking = new BitField(0x80);
private static final BitField fOnlyMacPics = new BitField(0x01);
private static final BitField fOnlyWinPics = new BitField(0x02);
private static final BitField fLabelDoc = new BitField(0x04);
private static final BitField fHyphCapitals = new BitField(0x08);
private static final BitField fAutoHyphen = new BitField(0x10);
private static final BitField fFormNoFields = new BitField(0x20);
private static final BitField fLinkStyles = new BitField(0x40);
private static final BitField fRevMarking = new BitField(0x80);
protected byte field_6_docinfo1;
/**/private static BitField fBackup = new BitField(0x01);
/**/private static BitField fExactCWords = new BitField(0x02);
/**/private static BitField fPagHidden = new BitField(0x04);
/**/private static BitField fPagResults = new BitField(0x08);
/**/private static BitField fLockAtn = new BitField(0x10);
/**/private static BitField fMirrorMargins = new BitField(0x20);
/**/private static BitField unused3 = new BitField(0x40);
/**/private static BitField fDfltTrueType = new BitField(0x80);
private static final BitField fBackup = new BitField(0x01);
private static final BitField fExactCWords = new BitField(0x02);
private static final BitField fPagHidden = new BitField(0x04);
private static final BitField fPagResults = new BitField(0x08);
private static final BitField fLockAtn = new BitField(0x10);
private static final BitField fMirrorMargins = new BitField(0x20);
private static final BitField unused3 = new BitField(0x40);
private static final BitField fDfltTrueType = new BitField(0x80);
protected byte field_7_docinfo2;
/**/private static BitField fPagSupressTopSpacing = new BitField(0x01);
/**/private static BitField fProtEnabled = new BitField(0x02);
/**/private static BitField fDispFormFldSel = new BitField(0x04);
/**/private static BitField fRMView = new BitField(0x08);
/**/private static BitField fRMPrint = new BitField(0x10);
/**/private static BitField unused4 = new BitField(0x20);
/**/private static BitField fLockRev = new BitField(0x40);
/**/private static BitField fEmbedFonts = new BitField(0x80);
private static final BitField fPagSupressTopSpacing = new BitField(0x01);
private static final BitField fProtEnabled = new BitField(0x02);
private static final BitField fDispFormFldSel = new BitField(0x04);
private static final BitField fRMView = new BitField(0x08);
private static final BitField fRMPrint = new BitField(0x10);
private static final BitField unused4 = new BitField(0x20);
private static final BitField fLockRev = new BitField(0x40);
private static final BitField fEmbedFonts = new BitField(0x80);
protected short field_8_docinfo3;
/**/private static BitField oldfNoTabForInd = new BitField(0x0001);
/**/private static BitField oldfNoSpaceRaiseLower = new BitField(0x0002);
/**/private static BitField oldfSuppressSpbfAfterPageBreak = new BitField(0x0004);
/**/private static BitField oldfWrapTrailSpaces = new BitField(0x0008);
/**/private static BitField oldfMapPrintTextColor = new BitField(0x0010);
/**/private static BitField oldfNoColumnBalance = new BitField(0x0020);
/**/private static BitField oldfConvMailMergeEsc = new BitField(0x0040);
/**/private static BitField oldfSupressTopSpacing = new BitField(0x0080);
/**/private static BitField oldfOrigWordTableRules = new BitField(0x0100);
/**/private static BitField oldfTransparentMetafiles = new BitField(0x0200);
/**/private static BitField oldfShowBreaksInFrames = new BitField(0x0400);
/**/private static BitField oldfSwapBordersFacingPgs = new BitField(0x0800);
/**/private static BitField unused5 = new BitField(0xf000);
private static final BitField oldfNoTabForInd = new BitField(0x0001);
private static final BitField oldfNoSpaceRaiseLower = new BitField(0x0002);
private static final BitField oldfSuppressSpbfAfterPageBreak = new BitField(0x0004);
private static final BitField oldfWrapTrailSpaces = new BitField(0x0008);
private static final BitField oldfMapPrintTextColor = new BitField(0x0010);
private static final BitField oldfNoColumnBalance = new BitField(0x0020);
private static final BitField oldfConvMailMergeEsc = new BitField(0x0040);
private static final BitField oldfSupressTopSpacing = new BitField(0x0080);
private static final BitField oldfOrigWordTableRules = new BitField(0x0100);
private static final BitField oldfTransparentMetafiles = new BitField(0x0200);
private static final BitField oldfShowBreaksInFrames = new BitField(0x0400);
private static final BitField oldfSwapBordersFacingPgs = new BitField(0x0800);
private static final BitField unused5 = new BitField(0xf000);
protected int field_9_dxaTab;
protected int field_10_wSpare;
protected int field_11_dxaHotz;
@ -103,16 +102,16 @@ public abstract class DOPAbstractType implements HDFType {
protected int field_21_cPg;
protected int field_22_cParas;
protected short field_23_Edn;
/**/private static BitField rncEdn = new BitField(0x0003);
/**/private static BitField nEdn = new BitField(0xfffc);
private static final BitField rncEdn = new BitField(0x0003);
private static final BitField nEdn = new BitField(0xfffc);
protected short field_24_Edn1;
/**/private static BitField epc = new BitField(0x0003);
/**/private static BitField nfcFtnRef1 = new BitField(0x003c);
/**/private static BitField nfcEdnRef1 = new BitField(0x03c0);
/**/private static BitField fPrintFormData = new BitField(0x0400);
/**/private static BitField fSaveFormData = new BitField(0x0800);
/**/private static BitField fShadeFormData = new BitField(0x1000);
/**/private static BitField fWCFtnEdn = new BitField(0x8000);
private static final BitField epc = new BitField(0x0003);
private static final BitField nfcFtnRef1 = new BitField(0x003c);
private static final BitField nfcEdnRef1 = new BitField(0x03c0);
private static final BitField fPrintFormData = new BitField(0x0400);
private static final BitField fSaveFormData = new BitField(0x0800);
private static final BitField fShadeFormData = new BitField(0x1000);
private static final BitField fWCFtnEdn = new BitField(0x8000);
protected int field_25_cLines;
protected int field_26_cWordsFtnEnd;
protected int field_27_cChFtnEdn;
@ -121,55 +120,55 @@ public abstract class DOPAbstractType implements HDFType {
protected int field_30_cLinesFtnEdn;
protected int field_31_lKeyProtDoc;
protected short field_32_view;
/**/private static BitField wvkSaved = new BitField(0x0007);
/**/private static BitField wScaleSaved = new BitField(0x0ff8);
/**/private static BitField zkSaved = new BitField(0x3000);
/**/private static BitField fRotateFontW6 = new BitField(0x4000);
/**/private static BitField iGutterPos = new BitField(0x8000);
private static final BitField wvkSaved = new BitField(0x0007);
private static final BitField wScaleSaved = new BitField(0x0ff8);
private static final BitField zkSaved = new BitField(0x3000);
private static final BitField fRotateFontW6 = new BitField(0x4000);
private static final BitField iGutterPos = new BitField(0x8000);
protected int field_33_docinfo4;
/**/private static BitField fNoTabForInd = new BitField(0x00000001);
/**/private static BitField fNoSpaceRaiseLower = new BitField(0x00000002);
/**/private static BitField fSupressSpdfAfterPageBreak = new BitField(0x00000004);
/**/private static BitField fWrapTrailSpaces = new BitField(0x00000008);
/**/private static BitField fMapPrintTextColor = new BitField(0x00000010);
/**/private static BitField fNoColumnBalance = new BitField(0x00000020);
/**/private static BitField fConvMailMergeEsc = new BitField(0x00000040);
/**/private static BitField fSupressTopSpacing = new BitField(0x00000080);
/**/private static BitField fOrigWordTableRules = new BitField(0x00000100);
/**/private static BitField fTransparentMetafiles = new BitField(0x00000200);
/**/private static BitField fShowBreaksInFrames = new BitField(0x00000400);
/**/private static BitField fSwapBordersFacingPgs = new BitField(0x00000800);
/**/private static BitField fSuppressTopSPacingMac5 = new BitField(0x00010000);
/**/private static BitField fTruncDxaExpand = new BitField(0x00020000);
/**/private static BitField fPrintBodyBeforeHdr = new BitField(0x00040000);
/**/private static BitField fNoLeading = new BitField(0x00080000);
/**/private static BitField fMWSmallCaps = new BitField(0x00200000);
private static final BitField fNoTabForInd = new BitField(0x00000001);
private static final BitField fNoSpaceRaiseLower = new BitField(0x00000002);
private static final BitField fSupressSpdfAfterPageBreak = new BitField(0x00000004);
private static final BitField fWrapTrailSpaces = new BitField(0x00000008);
private static final BitField fMapPrintTextColor = new BitField(0x00000010);
private static final BitField fNoColumnBalance = new BitField(0x00000020);
private static final BitField fConvMailMergeEsc = new BitField(0x00000040);
private static final BitField fSupressTopSpacing = new BitField(0x00000080);
private static final BitField fOrigWordTableRules = new BitField(0x00000100);
private static final BitField fTransparentMetafiles = new BitField(0x00000200);
private static final BitField fShowBreaksInFrames = new BitField(0x00000400);
private static final BitField fSwapBordersFacingPgs = new BitField(0x00000800);
private static final BitField fSuppressTopSPacingMac5 = new BitField(0x00010000);
private static final BitField fTruncDxaExpand = new BitField(0x00020000);
private static final BitField fPrintBodyBeforeHdr = new BitField(0x00040000);
private static final BitField fNoLeading = new BitField(0x00080000);
private static final BitField fMWSmallCaps = new BitField(0x00200000);
protected short field_34_adt;
protected byte[] field_35_doptypography;
protected byte[] field_36_dogrid;
protected short field_37_docinfo5;
/**/private static BitField lvl = new BitField(0x001e);
/**/private static BitField fGramAllDone = new BitField(0x0020);
/**/private static BitField fGramAllClean = new BitField(0x0040);
/**/private static BitField fSubsetFonts = new BitField(0x0080);
/**/private static BitField fHideLastVersion = new BitField(0x0100);
/**/private static BitField fHtmlDoc = new BitField(0x0200);
/**/private static BitField fSnapBorder = new BitField(0x0800);
/**/private static BitField fIncludeHeader = new BitField(0x1000);
/**/private static BitField fIncludeFooter = new BitField(0x2000);
/**/private static BitField fForcePageSizePag = new BitField(0x4000);
/**/private static BitField fMinFontSizePag = new BitField(0x8000);
private static final BitField lvl = new BitField(0x001e);
private static final BitField fGramAllDone = new BitField(0x0020);
private static final BitField fGramAllClean = new BitField(0x0040);
private static final BitField fSubsetFonts = new BitField(0x0080);
private static final BitField fHideLastVersion = new BitField(0x0100);
private static final BitField fHtmlDoc = new BitField(0x0200);
private static final BitField fSnapBorder = new BitField(0x0800);
private static final BitField fIncludeHeader = new BitField(0x1000);
private static final BitField fIncludeFooter = new BitField(0x2000);
private static final BitField fForcePageSizePag = new BitField(0x4000);
private static final BitField fMinFontSizePag = new BitField(0x8000);
protected short field_38_docinfo6;
/**/private static BitField fHaveVersions = new BitField(0x0001);
/**/private static BitField fAutoVersions = new BitField(0x0002);
private static final BitField fHaveVersions = new BitField(0x0001);
private static final BitField fAutoVersions = new BitField(0x0002);
protected byte[] field_39_asumyi;
protected int field_40_cChWS;
protected int field_41_cChWSFtnEdn;
protected int field_42_grfDocEvents;
protected int field_43_virusinfo;
/**/private static BitField fVirusPrompted = new BitField(0x0001);
/**/private static BitField fVirusLoadSafe = new BitField(0x0002);
/**/private static BitField KeyVirusSession30 = new BitField(0xfffffffc);
private static final BitField fVirusPrompted = new BitField(0x0001);
private static final BitField fVirusLoadSafe = new BitField(0x0002);
private static final BitField KeyVirusSession30 = new BitField(0xfffffffc);
protected byte[] field_44_Spare;
protected int field_45_reserved1;
protected int field_46_reserved2;

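These generated HWPF types pack many boolean flags into single bytes and shorts and read them through org.apache.poi.util.BitField masks; the change above only marks the masks static final, as Sonar suggests for constants. A small sketch of how such a mask is read and written (the flag names reuse the ones above purely for illustration):

import org.apache.poi.util.BitField;

public class BitFieldDemo {
    private static final BitField fFacingPages  = new BitField(0x01);
    private static final BitField fWidowControl = new BitField(0x02);

    public static void main(String[] args) {
        byte formatFlags = 0x02;                                // only widow control set
        System.out.println(fFacingPages.isSet(formatFlags));    // false
        System.out.println(fWidowControl.isSet(formatFlags));   // true
        // Turn a flag on without disturbing the other bits:
        formatFlags = (byte) fFacingPages.setBoolean(formatFlags, true);
        System.out.println(Integer.toBinaryString(formatFlags & 0xff)); // 11
    }
}
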
@ -17,7 +17,6 @@

package org.apache.poi.hwpf.model.types;

import org.apache.poi.hdf.model.hdftypes.HDFType;
import org.apache.poi.util.BitField;
import org.apache.poi.util.Internal;

@ -34,21 +33,21 @@ import org.apache.poi.util.Internal;
* File Format Specification [*.doc]
*/
@Internal
public abstract class FLDAbstractType implements HDFType
public abstract class FLDAbstractType
{

protected byte field_1_chHolder;
private static BitField ch = new BitField( 0x1f );
private static BitField reserved = new BitField( 0xe0 );
private static final BitField ch = new BitField( 0x1f );
private static final BitField reserved = new BitField( 0xe0 );
protected byte field_2_flt;
private static BitField fDiffer = new BitField( 0x01 );
private static BitField fZombieEmbed = new BitField( 0x02 );
private static BitField fResultDirty = new BitField( 0x04 );
private static BitField fResultEdited = new BitField( 0x08 );
private static BitField fLocked = new BitField( 0x10 );
private static BitField fPrivateResult = new BitField( 0x20 );
private static BitField fNested = new BitField( 0x40 );
private static BitField fHasSep = new BitField( 0x40 );
private static final BitField fDiffer = new BitField( 0x01 );
private static final BitField fZombieEmbed = new BitField( 0x02 );
private static final BitField fResultDirty = new BitField( 0x04 );
private static final BitField fResultEdited = new BitField( 0x08 );
private static final BitField fLocked = new BitField( 0x10 );
private static final BitField fPrivateResult = new BitField( 0x20 );
private static final BitField fNested = new BitField( 0x40 );
private static final BitField fHasSep = new BitField( 0x40 );

public FLDAbstractType()
{

@ -17,7 +17,6 @@

package org.apache.poi.hwpf.model.types;

import org.apache.poi.hdf.model.hdftypes.HDFType;
import org.apache.poi.util.BitField;
import org.apache.poi.util.Internal;
import org.apache.poi.util.LittleEndian;
@ -35,18 +34,18 @@ import org.apache.poi.util.LittleEndian;
* File Format Specification [*.doc]
*/
@Internal
public abstract class TLPAbstractType implements HDFType
public abstract class TLPAbstractType
{

protected short field_1_itl;
protected byte field_2_tlp_flags;
private static BitField fBorders = new BitField( 0x0001 );
private static BitField fShading = new BitField( 0x0002 );
private static BitField fFont = new BitField( 0x0004 );
private static BitField fColor = new BitField( 0x0008 );
private static BitField fBestFit = new BitField( 0x0010 );
private static BitField fHdrRows = new BitField( 0x0020 );
private static BitField fLastRow = new BitField( 0x0040 );
private static final BitField fBorders = new BitField( 0x0001 );
private static final BitField fShading = new BitField( 0x0002 );
private static final BitField fFont = new BitField( 0x0004 );
private static final BitField fColor = new BitField( 0x0008 );
private static final BitField fBestFit = new BitField( 0x0010 );
private static final BitField fHdrRows = new BitField( 0x0020 );
private static final BitField fLastRow = new BitField( 0x0040 );

public TLPAbstractType()
{

@ -1,71 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.hdf.extractor;

import static org.junit.Assert.*;

import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;

import org.apache.poi.POIDataSamples;
import org.apache.poi.hwpf.HWPFDocument;
import org.apache.poi.hwpf.HWPFTestDataSamples;
import org.apache.poi.hwpf.extractor.WordExtractor;
import org.junit.Test;


public class TestWordDocument {
@SuppressWarnings("deprecation")
@Test
public void testMain() {
// fails, but exception is caught and only printed
//WordDocument.main(new String[] {});

//WordDocument.main(new String[] {"test-data/document/Word95.doc", "/tmp/test.doc"});
//WordDocument.main(new String[] {"test-data/document/Word6.doc", "/tmp/test.doc"});
WordDocument.main(new String[] {POIDataSamples.getDocumentInstance().getFile("53446.doc").getAbsolutePath(), "/tmp/test.doc"});
}

@SuppressWarnings("deprecation")
@Test
public void test47304() throws IOException {
HWPFDocument doc = HWPFTestDataSamples.openSampleFile("47304.doc");
assertNotNull(doc);

WordExtractor extractor = new WordExtractor(doc);
String text = extractor.getText();
//System.out.println(text);
assertTrue("Had: " + text, text.contains("Just a \u201Ctest\u201D"));
extractor.close();

WordDocument wordDoc = new WordDocument(POIDataSamples.getDocumentInstance().getFile("47304.doc").getAbsolutePath());

StringWriter docTextWriter = new StringWriter();
PrintWriter out = new PrintWriter(docTextWriter);
try {
wordDoc.writeAllText(out);
} finally {
out.close();
}
docTextWriter.close();

//System.out.println(docTextWriter.toString());
assertTrue("Had: " + docTextWriter.toString(), docTextWriter.toString().contains("Just a \u201Ctest\u201D"));
}
}

@ -1,78 +0,0 @@
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */

package org.apache.poi.hdf.model;

import java.io.IOException;
import java.io.InputStream;

import junit.framework.TestCase;
import org.apache.poi.POIDataSamples;

/**
* Class to test {@link HDFDocument} functionality
*
* @author Bob Otterberg
*/
public final class TestHDFDocument extends TestCase {
private static final POIDataSamples _samples = POIDataSamples.getDocumentInstance();

/**
* OBJECTIVE: Test that HDF can read an empty document (empty.doc).<P>
* SUCCESS: HDF reads the document. Matches values in their particular positions.<P>
* FAILURE: HDF does not read the document or excepts. HDF cannot identify values
* in the document in their known positions.<P>
*/
public void testEmpty() throws IOException {
InputStream stream = _samples.openResourceAsStream("empty.doc");
new HDFDocument(stream);
}

/**
* OBJECTIVE: Test that HDF can read an _very_ simple document (simple.doc).<P>
* SUCCESS: HDF reads the document. Matches values in their particular positions.<P>
* FAILURE: HDF does not read the document or excepts. HDF cannot identify values
* in the document in their known positions.<P>
*/
public void testSimple() throws IOException {
InputStream stream = _samples.openResourceAsStream("simple.doc");
new HDFDocument(stream);
}

/**
* OBJECTIVE: Test that HDF can read a document containing a simple list (simple-list.doc).<P>
* SUCCESS: HDF reads the document. Matches values in their particular positions.<P>
* FAILURE: HDF does not read the document or excepts. HDF cannot identify values
* in the document in their known positions.<P>
*
*/
public void testSimpleList() throws IOException {
InputStream stream = _samples.openResourceAsStream("simple-list.doc");
new HDFDocument(stream);
}

/**
* OBJECTIVE: Test that HDF can read a document containing a simple table (simple-table.doc).<P>
* SUCCESS: HDF reads the document. Matches values in their particular positions.<P>
* FAILURE: HDF does not read the document or excepts. HDF cannot identify values
* in the document in their known positions.<P>
*/
public void testSimpleTable() throws IOException {
InputStream stream = _samples.openResourceAsStream("simple-table.doc");
new HDFDocument(stream);
}
}