Make sure to always read the expected block size from a stream of hashed content.

This commit is contained in:
Maschell 2019-04-24 13:27:02 +02:00
parent 06184da8d3
commit a58c1c5b77

View File

@ -212,6 +212,10 @@ public class NUSDecryption extends AESDecryption {
inBlockBuffer = StreamUtils.getChunkFromStream(inputStream, encryptedBlockBuffer, overflow, BLOCKSIZE); inBlockBuffer = StreamUtils.getChunkFromStream(inputStream, encryptedBlockBuffer, overflow, BLOCKSIZE);
if (writeSize > filesize) writeSize = filesize; if (writeSize > filesize) writeSize = filesize;
if (inBlockBuffer != BLOCKSIZE) {
throw new IOException("wasn't able to read " + BLOCKSIZE);
}
byte[] output; byte[] output;
try { try {
output = decryptFileChunkHash(encryptedBlockBuffer, (int) block, contentIndex, h3Hash); output = decryptFileChunkHash(encryptedBlockBuffer, (int) block, contentIndex, h3Hash);