javadoc fixes
git-svn-id: https://svn.apache.org/repos/asf/poi/trunk@1832978 13f79535-47bb-0310-9956-ffa450edef68
parent 792f556ded
commit fc2998c82e
@@ -55,6 +55,11 @@ public class OLE2ScratchpadExtractorFactory {
      * out what format is desired
      * Note - doesn't check for core-supported formats!
      * Note - doesn't check for OOXML-supported formats
+     *
+     * @param poifsDir the directory node to be inspected
+     * @return the format specific text extractor
+     *
+     * @throws IOException when the format specific extraction fails because of invalid entires
      */
     public static POITextExtractor createExtractor(DirectoryNode poifsDir) throws IOException {
         if (poifsDir.hasEntry("WordDocument")) {
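For context, a minimal usage sketch of the createExtractor entry point documented above. This is not part of the patch; the package names (org.apache.poi.extractor / org.apache.poi.extractor.ole2, as laid out around POI 4.x) and the sample file name are assumptions.

    import java.io.File;

    import org.apache.poi.extractor.POITextExtractor;
    import org.apache.poi.extractor.ole2.OLE2ScratchpadExtractorFactory;
    import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;

    public class ExtractorSketch {
        public static void main(String[] args) throws Exception {
            // Open the OLE2 container and hand its root directory to the factory,
            // which inspects the entries (e.g. "WordDocument") to pick an extractor.
            try (NPOIFSFileSystem fs = new NPOIFSFileSystem(new File("sample.doc"))) {
                POITextExtractor extractor = OLE2ScratchpadExtractorFactory.createExtractor(fs.getRoot());
                System.out.println(extractor.getText());
                extractor.close();
            }
        }
    }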
@@ -106,6 +111,12 @@ public class OLE2ScratchpadExtractorFactory {
      * If there are no embedded documents, you'll get back an
      *  empty array. Otherwise, you'll get one open
      *  {@link POITextExtractor} for each embedded file.
+     *
+     * @param ext the extractor holding the directory to start parsing
+     * @param dirs a list to be filled with directory references holding embedded
+     * @param nonPOIFS a list to be filled with streams which aren't based on POIFS entries
+     *
+     * @throws IOException when the format specific extraction fails because of invalid entires
      */
     public static void identifyEmbeddedResources(POIOLE2TextExtractor ext, List<Entry> dirs, List<InputStream> nonPOIFS) throws IOException {
         // Find all the embedded directories
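Similarly, a hedged sketch of how identifyEmbeddedResources might be called. The listEmbedded helper and the POIOLE2TextExtractor package location are illustrative assumptions, not from this commit.

    import java.io.InputStream;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.poi.extractor.POIOLE2TextExtractor;
    import org.apache.poi.extractor.ole2.OLE2ScratchpadExtractorFactory;
    import org.apache.poi.poifs.filesystem.Entry;

    public class EmbeddedSketch {
        // Fills the two lists: POIFS directory entries for embedded OLE2 documents,
        // and plain streams for embedded content that is not POIFS-based.
        static void listEmbedded(POIOLE2TextExtractor ext) throws Exception {
            List<Entry> dirs = new ArrayList<>();
            List<InputStream> nonPOIFS = new ArrayList<>();
            OLE2ScratchpadExtractorFactory.identifyEmbeddedResources(ext, dirs, nonPOIFS);
            System.out.println(dirs.size() + " embedded POIFS directories, "
                    + nonPOIFS.size() + " non-POIFS streams");
        }
    }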
@@ -101,13 +101,19 @@ public final class HDGFDiagram extends POIReadOnlyDocument {
     /**
      * Returns the TrailerStream, which is at the root of the
      *  tree of Streams.
+     *
+     * @return the TrailerStream
      */
     public TrailerStream getTrailerStream() { return trailer; }
 
     /**
      * Returns all the top level streams, which are the streams
      *  pointed to by the TrailerStream.
+     *
+     * @return the top level streams
      */
     public Stream[] getTopLevelStreams() { return trailer.getPointedToStreams(); }
 
     public long getDocumentSize() { return docSize; }
 
     /**
@@ -154,15 +160,4 @@ public final class HDGFDiagram extends POIReadOnlyDocument {
             }
         }
     }
-
-    /**
-     * For testing only
-     */
-    public static void main(String args[]) throws Exception {
-        NPOIFSFileSystem pfs = new NPOIFSFileSystem(new File(args[0]));
-        HDGFDiagram hdgf = new HDGFDiagram(pfs);
-        hdgf.debug();
-        hdgf.close();
-        pfs.close();
-    }
 }
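The removed main() above doubled as a usage example; a similar standalone sketch using the newly documented accessors follows. The sample file name and the try-with-resources wiring are assumptions for illustration, not part of the patch.

    import java.io.File;

    import org.apache.poi.hdgf.HDGFDiagram;
    import org.apache.poi.hdgf.streams.Stream;
    import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;

    public class HDGFSketch {
        public static void main(String[] args) throws Exception {
            try (NPOIFSFileSystem fs = new NPOIFSFileSystem(new File("sample.vsd"));
                 HDGFDiagram hdgf = new HDGFDiagram(fs)) {
                // The TrailerStream is the root of the stream tree; the top level
                // streams are the ones it points to.
                System.out.println("Trailer stream:    " + hdgf.getTrailerStream());
                System.out.println("Document size:     " + hdgf.getDocumentSize());
                Stream[] topLevel = hdgf.getTopLevelStreams();
                System.out.println("Top level streams: " + topLevel.length);
            }
        }
    }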
@@ -46,6 +46,11 @@ public class HDGFLZW extends LZWDecompresser {
     /**
      * Compress the given input stream, returning the array of bytes
      *  of the compressed input
+     *
+     * @param src the compression source byte
+     * @return the compressed stream as bytes
+     *
+     * @throws IOException when the InputStream can't be read
      */
     public byte[] compress(InputStream src) throws IOException {
         ByteArrayOutputStream res = new ByteArrayOutputStream();
@@ -76,6 +81,12 @@ public class HDGFLZW extends LZWDecompresser {
 
     /**
      * Performs the Visio compatible streaming LZW compression.
+     *
+     * @param src the input bytes for the compression
+     * @param res the OutputStream which receives the compressed bytes
+     *
+     * @throws IOException when the InputStream can't be read
+     *  or the OutputStream can't be written to
      */
     public void compress(InputStream src, OutputStream res) throws IOException {
         HDGFLZWCompressor c = new HDGFLZWCompressor();
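A sketch of the two compress overloads documented above. The no-argument HDGFLZW constructor and the sample input bytes are assumptions for illustration.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.nio.charset.StandardCharsets;

    import org.apache.poi.hdgf.HDGFLZW;

    public class LZWSketch {
        public static void main(String[] args) throws Exception {
            byte[] raw = "some raw Visio chunk data".getBytes(StandardCharsets.US_ASCII);
            HDGFLZW lzw = new HDGFLZW();

            // One-shot form: returns the compressed bytes as an array
            byte[] packed = lzw.compress(new ByteArrayInputStream(raw));

            // Streaming form: writes the compressed bytes to the given OutputStream
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            lzw.compress(new ByteArrayInputStream(raw), out);

            System.out.println(packed.length + " / " + out.size() + " compressed bytes");
        }
    }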
@@ -60,29 +60,47 @@ public final class Chunk {
     public byte[] _getContents() {
         return contents;
     }
 
     public ChunkHeader getHeader() {
         return header;
     }
-    /** Gets the separator between this chunk and the next, if it exists */
+
+    /**
+     * Gets the separator between this chunk and the next, if it exists
+     *
+     * @return the separator
+     */
     public ChunkSeparator getSeparator() {
         return separator;
     }
-    /** Gets the trailer for this chunk, if it exists */
+
+    /**
+     * Gets the trailer for this chunk, if it exists
+     *
+     * @return the trailer
+     */
     public ChunkTrailer getTrailer() {
         return trailer;
     }
 
     /**
      * Gets the command definitions, which define and describe much
      *  of the data held by the chunk.
+     *
+     * @return the command definitions
      */
     public CommandDefinition[] getCommandDefinitions() {
         return commandDefinitions;
     }
 
     public Command[] getCommands() {
         return commands;
     }
 
     /**
      * Get the name of the chunk, as found from the CommandDefinitions
+     *
+     * @return the name of the chunk
      */
     public String getName() {
         return name;
@@ -91,6 +109,8 @@ public final class Chunk {
     /**
      * Returns the size of the chunk, including any
      *  headers, trailers and separators.
+     *
+     * @return the size of the chunk
      */
     public int getOnDiskSize() {
         int size = header.getSizeInBytes() + contents.length;
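The Chunk accessors documented above can be combined into a simple dump. The dumpChunk helper is illustrative only; how the Chunk instance is obtained is out of scope here.

    import org.apache.poi.hdgf.chunks.Chunk;

    public class ChunkSketch {
        // Prints the metadata exposed by the accessors touched in this patch
        static void dumpChunk(Chunk chunk) {
            System.out.println("Name:          " + chunk.getName());
            System.out.println("On-disk size:  " + chunk.getOnDiskSize());
            System.out.println("Commands:      " + chunk.getCommands().length);
            System.out.println("Has separator: " + (chunk.getSeparator() != null));
            System.out.println("Has trailer:   " + (chunk.getTrailer() != null));
        }
    }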
@@ -128,8 +128,11 @@ public final class ChunkFactory {
 
     /**
      * Creates the appropriate chunk at the given location.
-     * @param data
-     * @param offset
+     *
+     * @param data the chunk bytes
+     * @param offset the offset into the chunk bytes array to start reading from
+     *
+     * @return the new Chunk
      */
     public Chunk createChunk(byte[] data, int offset) {
         // Create the header
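createChunk is normally driven by the HDGF stream-parsing code; a hedged sketch of direct use follows, in which the ChunkFactory(version) constructor is an assumption not shown in this diff.

    import org.apache.poi.hdgf.chunks.Chunk;
    import org.apache.poi.hdgf.chunks.ChunkFactory;

    public class ChunkFactorySketch {
        // Parses the chunk that starts at offset 0 of the supplied bytes
        static Chunk firstChunk(byte[] data, int documentVersion) throws Exception {
            ChunkFactory factory = new ChunkFactory(documentVersion);
            return factory.createChunk(data, 0);
        }
    }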
@@ -33,6 +33,11 @@ public abstract class ChunkHeader {
     /**
      * Creates the appropriate ChunkHeader for the Chunk Header at
      *  the given location, for the given document version.
+     *
+     * @param documentVersion the documentVersion - 4 and higher is supported
+     * @param data the chunk data
+     * @param offset the start offset in the chunk data
+     * @return the ChunkHeader
      */
     public static ChunkHeader createChunkHeader(int documentVersion, byte[] data, int offset) {
         if(documentVersion >= 6) {
@@ -68,6 +73,10 @@ public abstract class ChunkHeader {
 
     /**
      * Returns the size of a chunk header for the given document version.
+     *
+     * @param documentVersion the documentVersion - 4 and higher is supported
+     *
+     * @return the header size
      */
     public static int getHeaderSize(int documentVersion) {
         if(documentVersion > 6) {
@@ -85,7 +94,7 @@ public abstract class ChunkHeader {
     public abstract Charset getChunkCharset();
 
     /**
-     * Returns the ID/IX of the chunk
+     * @return the ID/IX of the chunk
      */
     public int getId() {
         return id;
@@ -94,6 +103,8 @@ public abstract class ChunkHeader {
     /**
      * Returns the length of the trunk, excluding the length
      *  of the header, trailer or separator.
+     *
+     * @return the length of the trunk
      */
     public int getLength() {
         return length;
@@ -102,6 +113,8 @@ public abstract class ChunkHeader {
     /**
      * Returns the type of the chunk, which affects the
      *  mandatory information
+     *
+     * @return the type of the chunk
      */
     public int getType() {
         return type;
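Finally, the static ChunkHeader helpers and getters documented above, pulled together in a small sketch; data and offset are placeholders for bytes already read from a chunk stream.

    import org.apache.poi.hdgf.chunks.ChunkHeader;

    public class ChunkHeaderSketch {
        static void describeHeader(int documentVersion, byte[] data, int offset) {
            // The header layout depends on the document version; 4 and higher is supported
            int headerSize = ChunkHeader.getHeaderSize(documentVersion);
            ChunkHeader header = ChunkHeader.createChunkHeader(documentVersion, data, offset);
            System.out.println("Header bytes: " + headerSize);
            System.out.println("Type:         " + header.getType());
            System.out.println("ID/IX:        " + header.getId());
            System.out.println("Body length:  " + header.getLength());
        }
    }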