mirror of https://github.com/mitb-archive/filebot synced 2024-11-16 06:15:02 -05:00

* ability to calculate osdb hash from any InputStream, not just files

Reinhard Pointner 2008-12-21 13:01:48 +00:00
parent 857a550939
commit 036cf30d3e


@@ -2,9 +2,11 @@
 package net.sourceforge.filebot.web;
 
+import java.io.DataInputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
 import java.nio.LongBuffer;
@@ -42,6 +44,34 @@ public class OpenSubtitlesHasher {
     }
 
+    public static String computeHash(InputStream stream, long length) throws IOException {
+        int chunkSizeForFile = (int) Math.min(HASH_CHUNK_SIZE, length);
+        
+        // buffer that will contain the head and the tail chunk, chunks will overlap if length is smaller than two chunks
+        byte[] chunkBytes = new byte[(int) Math.min(2 * HASH_CHUNK_SIZE, length)];
+        
+        DataInputStream in = new DataInputStream(stream);
+        
+        // first chunk
+        in.readFully(chunkBytes, 0, chunkSizeForFile);
+        
+        long position = chunkSizeForFile;
+        long tailChunkPosition = length - chunkSizeForFile;
+        
+        // seek to position of the tail chunk, or not at all if length is smaller than two chunks
+        while (position < tailChunkPosition && (position += in.skip(tailChunkPosition - position)) >= 0);
+        
+        // second chunk, or the rest of the data if length is smaller than two chunks
+        in.readFully(chunkBytes, chunkSizeForFile, chunkBytes.length - chunkSizeForFile);
+        
+        long head = computeHashForChunk(ByteBuffer.wrap(chunkBytes, 0, chunkSizeForFile));
+        long tail = computeHashForChunk(ByteBuffer.wrap(chunkBytes, chunkBytes.length - chunkSizeForFile, chunkSizeForFile));
+        
+        return String.format("%016x", length + head + tail);
+    }
+
     private static long computeHashForChunk(ByteBuffer buffer) {
         LongBuffer longBuffer = buffer.order(ByteOrder.LITTLE_ENDIAN).asLongBuffer();
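A minimal usage sketch of the new overload, for context: the HashDemo wrapper below and its command-line file argument are hypothetical, only OpenSubtitlesHasher.computeHash(InputStream, long) comes from this diff. Since the OSDB hash mixes the total size into the result, the caller has to pass the stream's length explicitly; the stream itself no longer has to be backed by a File.

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

import net.sourceforge.filebot.web.OpenSubtitlesHasher;

public class HashDemo {

    public static void main(String[] args) throws IOException {
        File file = new File(args[0]);

        // the stream may come from anywhere (socket, archive entry, pipe, ...),
        // as long as the total number of bytes it will deliver is known in advance
        InputStream in = new FileInputStream(file);

        try {
            // prints a zero-padded 16-digit lower-case hex string
            System.out.println(OpenSubtitlesHasher.computeHash(in, file.length()));
        } finally {
            in.close();
        }
    }
}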