using System;
using System.Collections.Generic;
using System.Text;

namespace DSDecmp
{
    /// <summary>
    /// Utility class for compression using LZ-like compression schemes.
    /// </summary>
    public static class LZUtil
    {
        /// <summary>
        /// Determine the maximum size of an LZ-compressed block starting at newPtr, using the already compressed data
        /// starting at oldPtr. Takes O(newLength * oldLength) = O(n^2) time.
        /// </summary>
        /// <param name="newPtr">The start of the data that needs to be compressed.</param>
        /// <param name="newLength">The number of bytes that still need to be compressed.
        /// (or: the maximum number of bytes that _may_ be compressed into one block)</param>
        /// <param name="oldPtr">The start of the raw file.</param>
        /// <param name="oldLength">The number of bytes already compressed.</param>
        /// <param name="disp">The offset of the start of the longest block to refer to.</param>
        /// <param name="minDisp">The minimum allowed value for 'disp'.</param>
        /// <returns>The length of the longest sequence of bytes that can be copied from the already decompressed data.</returns>
        public static unsafe int GetOccurrenceLength(byte* newPtr, int newLength, byte* oldPtr, int oldLength,
            out int disp, int minDisp = 1)
        {
            disp = 0;
            if (newLength == 0)
                return 0;
            int maxLength = 0;
            // try every possible 'disp' value (disp = oldLength - i)
            for (int i = 0; i < oldLength - minDisp; i++)
            {
                // work from the start of the old data to the end, to mimic the original implementation's behaviour
                // (and going from start to end or from end to start does not influence the compression ratio anyway)
                byte* currentOldStart = oldPtr + i;
                int currentLength = 0;
                // determine the length we can copy if we go back (oldLength - i) bytes
                // always check the next 'newLength' bytes, and not just the available 'old' bytes,
                // as the copied data can also originate from what we're currently trying to compress.
                for (int j = 0; j < newLength; j++)
                {
                    // stop when the bytes are no longer the same
                    if (*(currentOldStart + j) != *(newPtr + j))
                        break;
                    currentLength++;
                }
                // update the optimal value
                if (currentLength > maxLength)
                {
                    maxLength = currentLength;
                    disp = oldLength - i;
                    // if we cannot do better anyway, stop trying.
                    if (maxLength == newLength)
                        break;
                }
            }
            return maxLength;
        }
    }
}
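
// --- Usage sketch (illustrative; not part of the original DSDecmp source). ---
// Shows how GetOccurrenceLength might drive a simple LZ-style scan. Note the
// method's implicit assumption that oldPtr + oldLength == newPtr, i.e. the
// already-processed bytes directly precede the remaining bytes in one
// contiguous buffer; the 'fixed' statement pins that buffer so the raw
// pointers stay valid. The class name, method name, and the match-length
// threshold of 3 are hypothetical. Requires compiling with /unsafe.
namespace DSDecmp
{
    public static class LZUtilExample
    {
        public static unsafe void PrintMatches(byte[] data)
        {
            fixed (byte* start = data)
            {
                int position = 0;
                while (position < data.Length)
                {
                    int disp;
                    int length = LZUtil.GetOccurrenceLength(
                        start + position,       // newPtr: first byte not yet compressed
                        data.Length - position, // newLength: bytes remaining
                        start,                  // oldPtr: start of the raw data
                        position,               // oldLength: bytes already processed
                        out disp);

                    if (length >= 3)
                    {
                        // a back-reference of 3+ bytes is typically worth encoding
                        Console.WriteLine("offset {0}: copy {1} bytes from {2} back", position, length, disp);
                        position += length;
                    }
                    else
                    {
                        // no worthwhile match: this byte would be emitted as a literal
                        position++;
                    }
                }
            }
        }
    }
}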