/*===   Tuning Parameter   ===*/
#ifndef LZ4HC_HEAPMODE
#  define LZ4HC_HEAPMODE 1
#endif

/*===   Dependency   ===*/
#define LZ4_HC_STATIC_LINKING_ONLY
#include "lz4hc.h"

/*===   Common definitions   ===*/
#if defined(__GNUC__)
#  pragma GCC diagnostic ignored "-Wunused-function"
#endif
#if defined (__clang__)
#  pragma clang diagnostic ignored "-Wunused-function"
#endif

#define LZ4_COMMONDEFS_ONLY
#ifndef LZ4_SRC_INCLUDED
#  include "lz4.c"   /* LZ4_count, constants, mem */
#endif

/*===   Constants   ===*/
#define OPTIMAL_ML (int)((ML_MASK-1)+MINMATCH)
#define LZ4_OPT_NUM   (1<<12)
/*===   Macros   ===*/
#define MIN(a,b)   ( (a) < (b) ? (a) : (b) )
#define MAX(a,b)   ( (a) > (b) ? (a) : (b) )
#define HASH_FUNCTION(i)         (((i) * 2654435761U) >> ((MINMATCH*8)-LZ4HC_HASH_LOG))
#define DELTANEXTMAXD(p)         chainTable[(p) & LZ4HC_MAXD_MASK]    /* flexible, LZ4HC_MAXD dependent */
#define DELTANEXTU16(table, pos) table[(U16)(pos)]                    /* faster */
/* Make fields passed to, and updated by LZ4HC_encodeSequence explicit */
#define UPDATABLE(ip, op, anchor) &ip, &op, &anchor
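/* Illustration (not part of lz4hc.c): HASH_FUNCTION is a Knuth-style
 * multiplicative hash. 2654435761U is a prime close to 2^32 divided by
 * the golden ratio, so the multiply spreads the MINMATCH*8 = 32 input
 * bits into the high bits, and the right shift keeps the top
 * LZ4HC_HASH_LOG bits as the table index. A minimal standalone sketch,
 * assuming a 15-bit log (the value of LZ4HC_HASH_LOG in lz4hc.h at the
 * time of writing):
 */
#if 0   /* illustrative sketch, not compiled with this file */
#include <stdint.h>
#include <stdio.h>
static uint32_t demo_hash(uint32_t first4Bytes)
{
    return (first4Bytes * 2654435761U) >> (32 - 15);
}
int main(void)
{
    /* adjacent inputs scatter widely across the table */
    printf("%u\n", demo_hash(0x64636261));  /* "abcd", little-endian */
    printf("%u\n", demo_hash(0x64636262));  /* "bbcd" */
    return 0;
}
#endif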
/* in LZ4HC_init_internal() : */
    uptrval startingOffset = (uptrval)(hc4->end - hc4->base);
    if (startingOffset > 1 GB) {
        LZ4HC_clearTables(hc4);
        startingOffset = 0;
    }
    /* first 64 KB of indexes are reserved */
    startingOffset += 64 KB;
/* Update chains up to ip (excluded) */
LZ4_FORCE_INLINE void LZ4HC_Insert (LZ4HC_CCtx_internal* hc4, const BYTE* ip)
{
    U16* const chainTable = hc4->chainTable;
    U32* const hashTable  = hc4->hashTable;
    const BYTE* const base = hc4->base;
    U32 const target = (U32)(ip - base);
    U32 idx = hc4->nextToUpdate;

    while (idx < target) {
        U32 const h = LZ4HC_hashPtr(base+idx);
        size_t delta = idx - hashTable[h];
        if (delta>LZ4_DISTANCE_MAX) delta = LZ4_DISTANCE_MAX;
        DELTANEXTU16(chainTable, idx) = (U16)delta;
        hashTable[h] = idx;
        idx++;
    }

    hc4->nextToUpdate = target;
}
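/* Illustration (not part of lz4hc.c): the chain table stores, for each
 * position, the backward delta to the previous position with the same
 * hash (saturated at LZ4_DISTANCE_MAX). Visiting older candidates is a
 * chain of subtractions. A minimal sketch with hypothetical names:
 */
#if 0   /* illustrative sketch, not compiled with this file */
static unsigned demo_walk_chain(const unsigned short* chainTable,
                                unsigned matchIndex, unsigned lowestIndex,
                                int maxAttempts)
{
    unsigned candidates = 0;
    /* indexes start at >= 64 KB by construction (see LZ4HC_init_internal),
     * so the U16 delta can never underflow the subtraction */
    while ((matchIndex >= lowestIndex) && (maxAttempts-- > 0)) {
        candidates++;                                          /* test this position */
        matchIndex -= chainTable[(unsigned short)matchIndex];  /* previous same-hash pos */
    }
    return candidates;
}
#endif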
/** LZ4HC_countBack() :
 * @return : negative value, nb of common bytes before ip/match */
LZ4_FORCE_INLINE
int LZ4HC_countBack(const BYTE* const ip, const BYTE* const match,
                    const BYTE* const iMin, const BYTE* const mMin)
{
    int back = 0;
    int const min = (int)MAX(iMin - ip, mMin - match);
    while ( (back > min)
         && (ip[back-1] == match[back-1]) )
            back--;
    return back;
}
#if defined(_MSC_VER)
#  define LZ4HC_rotl32(x,r) _rotl(x,r)
#else
#  define LZ4HC_rotl32(x,r) ((x << r) | (x >> (32 - r)))
#endif

static U32 LZ4HC_rotatePattern(size_t const rotate, U32 const pattern)
{
    size_t const bitsToRotate = (rotate & (sizeof(pattern) - 1)) << 3;
    if (bitsToRotate == 0) return pattern;
    return LZ4HC_rotl32(pattern, (int)bitsToRotate);
}
/* LZ4HC_countPattern() :
 * pattern32 must be a sample of repetitive pattern of length 1, 2 or 4 (but not 3!) */
static unsigned
LZ4HC_countPattern(const BYTE* ip, const BYTE* const iEnd, U32 const pattern32)
{
    const BYTE* const iStart = ip;
    reg_t const pattern = (sizeof(pattern)==8) ?
        (reg_t)pattern32 + (((reg_t)pattern32) << (sizeof(pattern)*4)) : pattern32;

    while (likely(ip < iEnd-(sizeof(pattern)-1))) {
        reg_t const diff = LZ4_read_ARCH(ip) ^ pattern;
        if (!diff) { ip+=sizeof(pattern); continue; }
        ip += LZ4_NbCommonBytes(diff);
        return (unsigned)(ip - iStart);
    }

    if (LZ4_isLittleEndian()) {
        reg_t patternByte = pattern;
        while ((ip<iEnd) && (*ip == (BYTE)patternByte)) {
            ip++; patternByte >>= 8;
        }
    } else {  /* big endian */
        U32 bitOffset = (sizeof(pattern)*8) - 8;
        while (ip < iEnd) {
            BYTE const byte = (BYTE)(pattern >> bitOffset);
            if (*ip != byte) break;
            ip ++; bitOffset -= 8;
        }
    }

    return (unsigned)(ip - iStart);
}
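/* Illustration (not part of lz4hc.c): on a 64-bit target, the 32-bit
 * sample is widened to a full register by duplicating it into the upper
 * half, so the scan can compare sizeof(reg_t) bytes per iteration.
 * A minimal standalone check of that widening identity:
 */
#if 0   /* illustrative sketch, not compiled with this file */
#include <assert.h>
#include <stdint.h>
int main(void)
{
    uint32_t const p32 = 0x61626162u;                      /* "abab" repeated */
    uint64_t const p64 = (uint64_t)p32 + (((uint64_t)p32) << 32);
    assert(p64 == 0x6162616261626162ULL);
    return 0;
}
#endif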
/* LZ4HC_reverseCountPattern() :
 * pattern must be a sample of repetitive pattern of length 1, 2 or 4 (but not 3!)
 * read using natural platform endianness */
static unsigned
LZ4HC_reverseCountPattern(const BYTE* ip, const BYTE* const iLow, U32 pattern)
{
    const BYTE* const iStart = ip;

    while (likely(ip >= iLow+4)) {
        if (LZ4_read32(ip-4) != pattern) break;
        ip -= 4;
    }
    {   const BYTE* bytePtr = (const BYTE*)(&pattern) + 3;  /* works for any endianness */
        while (likely(ip>iLow)) {
            if (ip[-1] != *bytePtr) break;
            ip--; bytePtr--;
    }   }
    return (unsigned)(iStart - ip);
}
/* LZ4HC_protectDictEnd() :
 * Checks if the match is in the last 3 bytes of the dictionary, so reading the
 * 4-byte MINMATCH would overflow.
 * @returns true if the match index is okay.
 */
static int LZ4HC_protectDictEnd(U32 const dictLimit, U32 const matchIndex)
{
    return ((U32)((dictLimit - 1) - matchIndex) >= 3);
}
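/* Illustration (not part of lz4hc.c): the single unsigned comparison
 * above covers two cases at once. If matchIndex sits fewer than 3 bytes
 * from the dictionary end, the subtraction yields 0..2 and the test
 * fails; if matchIndex >= dictLimit, the subtraction wraps around to a
 * huge value and the test passes. A small worked check:
 */
#if 0   /* illustrative sketch, not compiled with this file */
#include <assert.h>
#include <stdint.h>
static int demo_protectDictEnd(uint32_t dictLimit, uint32_t matchIndex)
{
    return ((uint32_t)((dictLimit - 1) - matchIndex) >= 3);
}
int main(void)
{
    assert( demo_protectDictEnd(100,  96));   /* 4 bytes left: safe */
    assert(!demo_protectDictEnd(100,  98));   /* only 2 bytes left: unsafe */
    assert( demo_protectDictEnd(100, 200));   /* past dictLimit: wraps, ok */
    return 0;
}
#endif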
LZ4_FORCE_INLINE int
LZ4HC_InsertAndGetWiderMatch (
        LZ4HC_CCtx_internal* const hc4,
        const BYTE* const ip,
        const BYTE* const iLowLimit, const BYTE* const iHighLimit,
        int longest,
        const BYTE** matchpos,
        const BYTE** startpos,
        const int maxNbAttempts,
        const int patternAnalysis, const int chainSwap,
        const dictCtx_directive dict,
        const HCfavor_e favorDecSpeed)
{
    U16* const chainTable = hc4->chainTable;
    U32* const HashTable = hc4->hashTable;
    const LZ4HC_CCtx_internal* const dictCtx = hc4->dictCtx;
    const BYTE* const base = hc4->base;
    const U32 dictLimit = hc4->dictLimit;
    const BYTE* const lowPrefixPtr = base + dictLimit;
    const U32 ipIndex = (U32)(ip - base);
    const U32 lowestMatchIndex = (hc4->lowLimit + (LZ4_DISTANCE_MAX + 1) > ipIndex) ? hc4->lowLimit : ipIndex - LZ4_DISTANCE_MAX;
    const BYTE* const dictBase = hc4->dictBase;
    int const lookBackLength = (int)(ip-iLowLimit);
    int nbAttempts = maxNbAttempts;
    U32 matchChainPos = 0;
    U32 const pattern = LZ4_read32(ip);
    U32 matchIndex;
    repeat_state_e repeat = rep_untested;
    size_t srcPatternLength = 0;

    DEBUGLOG(7, "LZ4HC_InsertAndGetWiderMatch");
    /* First Match */
    LZ4HC_Insert(hc4, ip);
    matchIndex = HashTable[LZ4HC_hashPtr(ip)];
    DEBUGLOG(7, "First match at index %u / %u (lowestMatchIndex)",
                matchIndex, lowestMatchIndex);
    while ((matchIndex>=lowestMatchIndex) && (nbAttempts>0)) {
        int matchLength=0;
        nbAttempts--;
        assert(matchIndex < ipIndex);
        if (favorDecSpeed && (ipIndex - matchIndex < 8)) {
            /* do nothing */
        } else if (matchIndex >= dictLimit) {   /* within current Prefix */
            const BYTE* const matchPtr = base + matchIndex;
            assert(matchPtr >= lowPrefixPtr);
            assert(matchPtr < ip);
            assert(longest >= 1);
            if (LZ4_read16(iLowLimit + longest - 1) == LZ4_read16(matchPtr - lookBackLength + longest - 1)) {
                if (LZ4_read32(matchPtr) == pattern) {
                    int const back = lookBackLength ? LZ4HC_countBack(ip, matchPtr, iLowLimit, lowPrefixPtr) : 0;
                    matchLength = MINMATCH + (int)LZ4_count(ip+MINMATCH, matchPtr+MINMATCH, iHighLimit);
                    matchLength -= back;
                    if (matchLength > longest) {
                        longest = matchLength;
                        *matchpos = matchPtr + back;
                        *startpos = ip + back;
            }   }   }
        } else {   /* lowestMatchIndex <= matchIndex < dictLimit */
            const BYTE* const matchPtr = dictBase + matchIndex;
            if (LZ4_read32(matchPtr) == pattern) {
                const BYTE* const dictStart = dictBase + hc4->lowLimit;
                int back = 0;
                const BYTE* vLimit = ip + (dictLimit - matchIndex);
                if (vLimit > iHighLimit) vLimit = iHighLimit;
                matchLength = (int)LZ4_count(ip+MINMATCH, matchPtr+MINMATCH, vLimit) + MINMATCH;
                if ((ip+matchLength == vLimit) && (vLimit < iHighLimit))
                    matchLength += (int)LZ4_count(ip+matchLength, lowPrefixPtr, iHighLimit);
                back = lookBackLength ? LZ4HC_countBack(ip, matchPtr, iLowLimit, dictStart) : 0;
                matchLength -= back;
                if (matchLength > longest) {
                    longest = matchLength;
                    *matchpos = base + matchIndex + back;   /* virtual pos, relative to ip, to retrieve offset */
                    *startpos = ip + back;
        }   }   }
        if (chainSwap && matchLength==longest) {   /* better match => select a better chain */
            assert(lookBackLength==0);   /* search forward only */
            if (matchIndex + (U32)longest <= ipIndex) {
                int const kTrigger = 4;
                U32 distanceToNextMatch = 1;
                int const end = longest - MINMATCH + 1;
                int step = 1;
                int accel = 1 << kTrigger;
                int pos;
                for (pos = 0; pos < end; pos += step) {
                    U32 const candidateDist = DELTANEXTU16(chainTable, matchIndex + (U32)pos);
                    step = (accel++ >> kTrigger);
                    if (candidateDist > distanceToNextMatch) {
                        distanceToNextMatch = candidateDist;
                        matchChainPos = (U32)pos;
                        accel = 1 << kTrigger;
                }   }
                if (distanceToNextMatch > 1) {
                    if (distanceToNextMatch > matchIndex) break;   /* avoid overflow */
                    matchIndex -= distanceToNextMatch;
                    continue;
        }   }   }
        {   U32 const distNextMatch = DELTANEXTU16(chainTable, matchIndex);
            if (patternAnalysis && distNextMatch==1 && matchChainPos==0) {
                U32 const matchCandidateIdx = matchIndex-1;
                /* may be a repeated pattern */
                if (repeat == rep_untested) {
                    if ( ((pattern & 0xFFFF) == (pattern >> 16))
                      &  ((pattern & 0xFF)   == (pattern >> 24)) ) {
                        repeat = rep_confirmed;
                        srcPatternLength = LZ4HC_countPattern(ip+sizeof(pattern), iHighLimit, pattern) + sizeof(pattern);
                    } else {
                        repeat = rep_not;
                }   }
                if ( (repeat == rep_confirmed) && (matchCandidateIdx >= lowestMatchIndex)
                  && LZ4HC_protectDictEnd(dictLimit, matchCandidateIdx) ) {
                    const int extDict = matchCandidateIdx < dictLimit;
                    const BYTE* const matchPtr = (extDict ? dictBase : base) + matchCandidateIdx;
                    if (LZ4_read32(matchPtr) == pattern) {  /* good candidate */
                        const BYTE* const dictStart = dictBase + hc4->lowLimit;
                        const BYTE* const iLimit = extDict ? dictBase + dictLimit : iHighLimit;
                        size_t forwardPatternLength = LZ4HC_countPattern(matchPtr+sizeof(pattern), iLimit, pattern) + sizeof(pattern);
                        if (extDict && matchPtr + forwardPatternLength == iLimit) {
                            U32 const rotatedPattern = LZ4HC_rotatePattern(forwardPatternLength, pattern);
                            forwardPatternLength += LZ4HC_countPattern(lowPrefixPtr, iHighLimit, rotatedPattern);
                        }
                        {   const BYTE* const lowestMatchPtr = extDict ? dictStart : lowPrefixPtr;
                            size_t backLength = LZ4HC_reverseCountPattern(matchPtr, lowestMatchPtr, pattern);
                            size_t currentSegmentLength;
                            if (!extDict && matchPtr - backLength == lowPrefixPtr && hc4->lowLimit < dictLimit) {
                                U32 const rotatedPattern = LZ4HC_rotatePattern((U32)(-(int)backLength), pattern);
                                backLength += LZ4HC_reverseCountPattern(dictBase + dictLimit, dictStart, rotatedPattern);
                            }
                            /* Limit backLength not to go further than lowestMatchIndex */
                            backLength = matchCandidateIdx - MAX(matchCandidateIdx - (U32)backLength, lowestMatchIndex);
                            assert(matchCandidateIdx - backLength >= lowestMatchIndex);
                            currentSegmentLength = backLength + forwardPatternLength;
                            /* Adjust to end of pattern if the source pattern fits, otherwise the beginning of the pattern */
                            if ( (currentSegmentLength >= srcPatternLength)   /* current pattern segment large enough to hold full srcPatternLength */
                              && (forwardPatternLength <= srcPatternLength) ) {   /* haven't reached this position yet */
                                U32 const newMatchIndex = matchCandidateIdx + (U32)forwardPatternLength - (U32)srcPatternLength;   /* best position, full pattern, might be followed by more match */
                                if (LZ4HC_protectDictEnd(dictLimit, newMatchIndex))
                                    matchIndex = newMatchIndex;
                                else {
                                    /* Can only happen if started in the prefix */
                                    assert(newMatchIndex >= dictLimit - 3 && newMatchIndex < dictLimit && !extDict);
                                    matchIndex = dictLimit;
                                }
                            } else {
                                U32 const newMatchIndex = matchCandidateIdx - (U32)backLength;   /* farthest position in current segment */
                                if (!LZ4HC_protectDictEnd(dictLimit, newMatchIndex)) {
                                    assert(newMatchIndex >= dictLimit - 3 && newMatchIndex < dictLimit && !extDict);
                                    matchIndex = dictLimit;
                                } else {
                                    matchIndex = newMatchIndex;
                                    if (lookBackLength==0) {   /* no back possible */
                                        size_t const maxML = MIN(currentSegmentLength, srcPatternLength);
                                        if ((size_t)longest < maxML) {
                                            assert(base + matchIndex != ip);
                                            if ((size_t)(ip - base) - matchIndex > LZ4_DISTANCE_MAX) break;
                                            longest = (int)maxML;
                                            *matchpos = base + matchIndex;   /* virtual pos, relative to ip, to retrieve offset */
                                            *startpos = ip;
                                        }
                                        {   U32 const distToNextPattern = DELTANEXTU16(chainTable, matchIndex);
                                            if (distToNextPattern > matchIndex) break;   /* avoid overflow */
                                            matchIndex -= distToNextPattern;
                        }   }   }   }   }
                        continue;
                }   }
        }   }   /* PA optimization */

        /* follow current chain */
        matchIndex -= DELTANEXTU16(chainTable, matchIndex + matchChainPos);

    }   /* while ((matchIndex>=lowestMatchIndex) && (nbAttempts>0)) */
    if ( dict == usingDictCtxHc
      && nbAttempts > 0
      && ipIndex - lowestMatchIndex < LZ4_DISTANCE_MAX) {
        size_t const dictEndOffset = (size_t)(dictCtx->end - dictCtx->base);
        U32 dictMatchIndex = dictCtx->hashTable[LZ4HC_hashPtr(ip)];
        assert(dictEndOffset <= 1 GB);
        matchIndex = dictMatchIndex + lowestMatchIndex - (U32)dictEndOffset;
        while (ipIndex - matchIndex <= LZ4_DISTANCE_MAX && nbAttempts--) {
            const BYTE* const matchPtr = dictCtx->base + dictMatchIndex;

            if (LZ4_read32(matchPtr) == pattern) {
                int mlt;
                int back = 0;
                const BYTE* vLimit = ip + (dictEndOffset - dictMatchIndex);
                if (vLimit > iHighLimit) vLimit = iHighLimit;
                mlt = (int)LZ4_count(ip+MINMATCH, matchPtr+MINMATCH, vLimit) + MINMATCH;
                back = lookBackLength ? LZ4HC_countBack(ip, matchPtr, iLowLimit, dictCtx->base + dictCtx->dictLimit) : 0;
                mlt -= back;
                if (mlt > longest) {
                    longest = mlt;
                    *matchpos = base + matchIndex + back;
                    *startpos = ip + back;
            }   }

            {   U32 const nextOffset = DELTANEXTU16(dictCtx->chainTable, dictMatchIndex);
                dictMatchIndex -= nextOffset;
                matchIndex -= nextOffset;
    }   }   }

    return longest;
}
LZ4_FORCE_INLINE
int LZ4HC_InsertAndFindBestMatch(LZ4HC_CCtx_internal* const hc4,   /* Index table will be updated */
                                 const BYTE* const ip, const BYTE* const iLimit,
                                 const BYTE** matchpos,
                                 const int maxNbAttempts,
                                 const int patternAnalysis,
                                 const dictCtx_directive dict)
{
    const BYTE* uselessPtr = ip;
    /* note : LZ4HC_InsertAndGetWiderMatch() is able to modify the starting position of a match (*startpos),
     * but this won't be the case here, as we define iLowLimit==ip,
     * so LZ4HC_InsertAndGetWiderMatch() won't be allowed to search past ip */
    return LZ4HC_InsertAndGetWiderMatch(hc4, ip, ip, iLimit, MINMATCH-1, matchpos, &uselessPtr, maxNbAttempts, patternAnalysis, 0 /*chainSwap*/, dict, favorCompressionRatio);
}
/* LZ4HC_encodeSequence() :
 * @return : 0 if ok,
 *           1 if buffer issue detected */
LZ4_FORCE_INLINE int LZ4HC_encodeSequence (
    const BYTE** _ip,
    BYTE** _op,
    const BYTE** _anchor,
    int matchLength,
    const BYTE* const match,
    limitedOutput_directive limit,
    BYTE* oend)
{
#define ip      (*_ip)
#define op      (*_op)
#define anchor  (*_anchor)

    size_t length;
    BYTE* const token = op++;

#if defined(LZ4_DEBUG) && (LZ4_DEBUG >= 6)
    static const BYTE* start = NULL;
    static U32 totalCost = 0;
    U32 const pos = (start==NULL) ? 0 : (U32)(anchor - start);
    U32 const ll = (U32)(ip - anchor);
    U32 const llAdd = (ll>=15) ? ((ll-15) / 255) + 1 : 0;
    U32 const mlAdd = (matchLength>=19) ? ((matchLength-19) / 255) + 1 : 0;
    U32 const cost = 1 + llAdd + ll + 2 + mlAdd;
    if (start==NULL) start = anchor;   /* only works for single segment */
    DEBUGLOG(6, "pos:%7u -- literals:%4u, match:%4i, offset:%5u, cost:%4u + %5u",
                pos, ll, matchLength, (U32)(ip-match), cost, totalCost);
    totalCost += cost;
#endif

    /* Encode Literal length */
    length = (size_t)(ip - anchor);
    /* Check output limit */
    if (limit && ((op + (length / 255) + length + (2 + 1 + LASTLITERALS)) > oend)) {
        DEBUGLOG(6, "Not enough room to write %i literals (%i bytes remaining)",
                (int)length, (int)(oend - op));
        return 1;
    }
    if (length >= RUN_MASK) {
        size_t len = length - RUN_MASK;
        *token = (RUN_MASK << ML_BITS);
        for(; len >= 255 ; len -= 255) *op++ = 255;
        *op++ = (BYTE)len;
    } else {
        *token = (BYTE)(length << ML_BITS);
    }

    /* ... copy literals, write the 2-byte offset ... */

    /* Encode MatchLength */
    assert(matchLength >= MINMATCH);
    length = (size_t)matchLength - MINMATCH;
    if (limit && (op + (length / 255) + (1 + LASTLITERALS) > oend)) {
        DEBUGLOG(6, "Not enough room to write match length");
        return 1;   /* Check output limit */
    }
    /* ... write match length, update anchor and ip ... */
    return 0;
}
LZ4_FORCE_INLINE int LZ4HC_compress_hashChain (
    LZ4HC_CCtx_internal* const ctx,
    const char* const source,
    char* const dest,
    int* srcSizePtr,
    int const maxOutputSize,
    int maxNbAttempts,
    const limitedOutput_directive limit,
    const dictCtx_directive dict
    )
{
    const int inputSize = *srcSizePtr;
    const int patternAnalysis = (maxNbAttempts > 128);   /* levels 9+ */

    const BYTE* ip = (const BYTE*)source;
    const BYTE* anchor = ip;
    const BYTE* const iend = ip + inputSize;
    const BYTE* const mflimit = iend - MFLIMIT;
    const BYTE* const matchlimit = (iend - LASTLITERALS);

    BYTE* optr = (BYTE*)dest;
    BYTE* op = (BYTE*)dest;
    BYTE* oend = op + maxOutputSize;

    int ml0, ml, ml2, ml3;
    const BYTE* start0;
    const BYTE* ref0;
    const BYTE* ref = NULL;
    const BYTE* start2 = NULL;
    const BYTE* ref2 = NULL;
    const BYTE* start3 = NULL;
    const BYTE* ref3 = NULL;

    /* init */
    *srcSizePtr = 0;
    if (limit == fillOutput) oend -= LASTLITERALS;        /* Hack for support of LZ4 format restriction */
    if (inputSize < LZ4_minLength) goto _last_literals;   /* Input too small, no compression (all literals) */

    /* Main Loop */
    while (ip <= mflimit) {
        ml = LZ4HC_InsertAndFindBestMatch(ctx, ip, matchlimit, &ref, maxNbAttempts, patternAnalysis, dict);
        if (ml<MINMATCH) { ip++; continue; }

        /* saved, in case we would skip too much */
        start0 = ip; ref0 = ref; ml0 = ml;

_Search2:
        if (ip+ml <= mflimit) {
            ml2 = LZ4HC_InsertAndGetWiderMatch(ctx,
                            ip + ml - 2, ip + 0, matchlimit, ml, &ref2, &start2,
                            maxNbAttempts, patternAnalysis, 0, dict, favorCompressionRatio);
        } else {
            ml2 = ml;
        }

        if (ml2 == ml) {   /* No better match => encode ML1 */
            optr = op;
            if (LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor), ml, ref, limit, oend)) goto _dest_overflow;
            continue;
        }

        if (start0 < ip) {   /* first match was skipped at least once */
            if (start2 < ip + ml0) {   /* squeezing ML1 between ML0 (original ML1) and ML2 */
                ip = start0; ref = ref0; ml = ml0;   /* restore initial ML1 */
        }   }

        /* Here, start0==ip */
        if ((start2 - ip) < 3) {   /* First Match too small : removed */
            ml = ml2;
            ip = start2;
            ref = ref2;
            goto _Search2;
        }

_Search3:
        /* At this stage : ml2 > ml1, and ip1+3 <= ip2 (usually < ip1+ml1) */
        if ((start2 - ip) < OPTIMAL_ML) {
            int correction;
            int new_ml = ml;
            if (new_ml > OPTIMAL_ML) new_ml = OPTIMAL_ML;
            if (ip+new_ml > start2 + ml2 - MINMATCH) new_ml = (int)(start2 - ip) + ml2 - MINMATCH;
            correction = new_ml - (int)(start2 - ip);
            if (correction > 0) {
                start2 += correction;
                ref2 += correction;
                ml2 -= correction;
            }
        }

        if (start2 + ml2 <= mflimit) {
            ml3 = LZ4HC_InsertAndGetWiderMatch(ctx,
                            start2 + ml2 - 3, start2, matchlimit, ml2, &ref3, &start3,
                            maxNbAttempts, patternAnalysis, 0, dict, favorCompressionRatio);
        } else {
            ml3 = ml2;
        }

        if (ml3 == ml2) {   /* No better match => encode ML1 and ML2 */
            /* ip & ref are known; Now decide ml */
            if (start2 < ip+ml)  ml = (int)(start2 - ip);
            /* ... encode the two sequences, then continue the main loop ... */
            continue;
        }

        if (start3 < ip+ml+3) {   /* Not enough space for match 2 : remove it */
            if (start3 >= (ip+ml)) {   /* can write Seq1 immediately ==> Seq2 removed, Seq3 becomes Seq1 */
                if (start2 < ip+ml) {
                    int correction = (int)(ip+ml - start2);
                    start2 += correction;
                    ref2 += correction;
                    ml2 -= correction;
                    if (ml2 < MINMATCH) {
                        start2 = start3;
                        ref2 = ref3;
                        ml2 = ml3;
                    }
                }
                /* ... encode ML1, promote Seq3 to Seq1, goto _Search2 ... */
            }
            /* ... otherwise promote Seq3 to Seq2 and retry from _Search3 ... */
        }

        /* OK, now we have 3 ascending matches; let's write the first one ML1 */
        if (start2 < ip+ml) {
            if ((start2 - ip) < OPTIMAL_ML) {
                int correction;
                if (ml > OPTIMAL_ML) ml = OPTIMAL_ML;
                if (ip + ml > start2 + ml2 - MINMATCH) ml = (int)(start2 - ip) + ml2 - MINMATCH;
                correction = ml - (int)(start2 - ip);
                if (correction > 0) {
                    start2 += correction;
                    ref2 += correction;
                    ml2 -= correction;
                }
            } else {
                ml = (int)(start2 - ip);
            }
        }
        optr = op;
        if (LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor), ml, ref, limit, oend)) goto _dest_overflow;

        /* ML2 becomes ML1 */
        ip = start2; ref = ref2; ml = ml2;

        /* ML3 becomes ML2 */
        start2 = start3; ref2 = ref3; ml2 = ml3;

        /* let's find a new ML3 */
        goto _Search3;
    }
_last_literals:
    /* Encode Last Literals */
    {   size_t lastRunSize = (size_t)(iend - anchor);   /* literals */
        size_t llAdd = (lastRunSize + 255 - RUN_MASK) / 255;
        size_t const totalSize = 1 + llAdd + lastRunSize;
        if (limit == fillOutput) oend += LASTLITERALS;   /* restore correct value */
        if (limit && (op + totalSize > oend)) {
            if (limit == limitedOutput) return 0;   /* Check output limit */
            /* adapt lastRunSize to fill 'dest' */
            lastRunSize  = (size_t)(oend - op) - 1 /*token*/;
            llAdd = (lastRunSize + 256 - RUN_MASK) / 256;
            lastRunSize -= llAdd;
        }
        DEBUGLOG(6, "Final literal run : %i literals", (int)lastRunSize);
        ip = anchor + lastRunSize;   /* can be != iend if limit==fillOutput */

        if (lastRunSize >= RUN_MASK) {
            size_t accumulator = lastRunSize - RUN_MASK;
            *op++ = (RUN_MASK << ML_BITS);
            for(; accumulator >= 255 ; accumulator -= 255) *op++ = 255;
            *op++ = (BYTE) accumulator;
        } else {
            *op++ = (BYTE)(lastRunSize << ML_BITS);
        }
        memcpy(op, anchor, lastRunSize);
        op += lastRunSize;
    }

    /* End */
    *srcSizePtr = (int) (((const char*)ip) - source);
    return (int) (((char*)op) - dest);
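/* Illustration (not part of lz4hc.c): with RUN_MASK == 15, a literal run
 * of length L needs floor((L-15)/255)+1 extra length bytes once L >= 15,
 * which is exactly (L + 255 - 15) / 255. In the fill-output path the
 * divisor becomes 256: each reserved length byte also displaces one
 * literal from the remaining budget. A small worked check:
 */
#if 0   /* illustrative sketch, not compiled with this file */
#include <assert.h>
#define DEMO_RUN_MASK 15
int main(void)
{
    size_t L = 300;                                    /* budget after the token byte */
    size_t const llAdd = (L + 256 - DEMO_RUN_MASK) / 256;   /* length bytes to reserve: 2 */
    L -= llAdd;                                        /* literals that still fit */
    assert(L == 298);
    /* re-check against the exact encoding cost: token + length bytes + literals */
    assert(1 + (L + 255 - DEMO_RUN_MASK) / 255 + L <= 301);
    return 0;
}
#endif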
_dest_overflow:
    if (limit == fillOutput) {
        /* Assumption : ip, anchor, ml and ref must be set correctly */
        size_t const ll = (size_t)(ip - anchor);
        size_t const ll_addbytes = (ll + 240) / 255;
        size_t const ll_totalCost = 1 + ll_addbytes + ll;
        BYTE* const maxLitPos = oend - 3;   /* 2 for offset, 1 for token */
        DEBUGLOG(6, "Last sequence overflowing");
        op = optr;   /* restore correct out pointer */
        if (op + ll_totalCost <= maxLitPos) {
            /* ll validated; now adjust match length to fit */
            size_t const bytesLeftForMl = (size_t)(maxLitPos - (op+ll_totalCost));
            size_t const maxMlSize = MINMATCH + (ML_MASK-1) + (bytesLeftForMl * 255);
            assert(maxMlSize < INT_MAX); assert(ml >= 0);
            if ((size_t)ml > maxMlSize) ml = (int)maxMlSize;
            if ((oend + LASTLITERALS) - (op + ll_totalCost + 2) - 1 + ml >= MFLIMIT) {
                LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor), ml, ref, notLimited, oend);
        }   }
        goto _last_literals;
    }
    /* compression failed */
    return 0;
}
static int LZ4HC_compress_optimal( LZ4HC_CCtx_internal* ctx,
    const char* const source, char* dst,
    int* srcSizePtr, int dstCapacity,
    int const nbSearches, size_t sufficient_len,
    const limitedOutput_directive limit, int const fullUpdate,
    const dictCtx_directive dict, const HCfavor_e favorDecSpeed);

typedef enum { lz4hc, lz4opt } lz4hc_strat_e;
typedef struct { lz4hc_strat_e strat; int nbSearches; U32 targetLength; } cParams_t;
/* ... clTable[LZ4HC_CLEVEL_MAX+1] : one { strat, nbSearches, targetLength } entry per compression level ... */

LZ4_FORCE_INLINE int LZ4HC_compress_generic_internal (
            LZ4HC_CCtx_internal* const ctx,
            const char* const src,
            char* const dst,
            int* const srcSizePtr,
            int const dstCapacity,
            int cLevel,
            const limitedOutput_directive limit,
            const dictCtx_directive dict
            )
{
    DEBUGLOG(4, "LZ4HC_compress_generic(ctx=%p, src=%p, srcSize=%d, limit=%d)",
                ctx, src, *srcSizePtr, limit);

    if (limit == fillOutput && dstCapacity < 1) return 0;         /* Impossible to store anything */
    if ((U32)*srcSizePtr > (U32)LZ4_MAX_INPUT_SIZE) return 0;     /* Unsupported input size (too large or negative) */

    ctx->end += *srcSizePtr;
    if (cLevel < 1) cLevel = LZ4HC_CLEVEL_DEFAULT;
    cLevel = MIN(LZ4HC_CLEVEL_MAX, cLevel);
    {   cParams_t const cParam = clTable[cLevel];
        HCfavor_e const favor = ctx->favorDecSpeed ? favorDecompressionSpeed : favorCompressionRatio;
        int result;

        if (cParam.strat == lz4hc) {
            result = LZ4HC_compress_hashChain(ctx,
                                src, dst, srcSizePtr, dstCapacity,
                                cParam.nbSearches, limit, dict);
        } else {
            assert(cParam.strat == lz4opt);
            result = LZ4HC_compress_optimal(ctx,
                                src, dst, srcSizePtr, dstCapacity,
                                cParam.nbSearches, cParam.targetLength, limit,
                                cLevel == LZ4HC_CLEVEL_MAX,   /* ultra mode */
                                dict, favor);
        }
        if (result <= 0) ctx->dirty = 1;   /* mark stream as broken */
        return result;
    }
}
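/* Illustration (not part of lz4hc.c): the one-shot entry point built on
 * the dispatch above. A minimal caller using only the public lz4hc.h
 * API; buffer sizes are chosen arbitrarily for the example:
 */
#if 0   /* illustrative sketch, not compiled with this file */
#include <stdio.h>
#include "lz4hc.h"
int main(void)
{
    const char src[] = "abcabcabcabcabcabcabcabcabcabc";
    char dst[64];   /* >= LZ4_compressBound(sizeof(src)) */
    int cSize;
    cSize = LZ4_compress_HC(src, dst, (int)sizeof(src),
                            (int)sizeof(dst), LZ4HC_CLEVEL_DEFAULT);
    if (cSize <= 0) { printf("compression failed\n"); return 1; }
    printf("compressed %d -> %d bytes\n", (int)sizeof(src), cSize);
    return 0;
}
#endif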
static int
LZ4HC_compress_generic_noDictCtx (
        LZ4HC_CCtx_internal* const ctx,
        const char* const src,
        char* const dst,
        int* const srcSizePtr,
        int const dstCapacity,
        int cLevel,
        limitedOutput_directive limit
        )
{
    assert(ctx->dictCtx == NULL);
    return LZ4HC_compress_generic_internal(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit, noDictCtx);
}

static int
LZ4HC_compress_generic_dictCtx (
        LZ4HC_CCtx_internal* const ctx,
        const char* const src,
        char* const dst,
        int* const srcSizePtr,
        int const dstCapacity,
        int cLevel,
        limitedOutput_directive limit
        )
{
    const size_t position = (size_t)(ctx->end - ctx->base) - ctx->lowLimit;
    assert(ctx->dictCtx != NULL);
    if (position >= 64 KB) {
        ctx->dictCtx = NULL;
        return LZ4HC_compress_generic_noDictCtx(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit);
    } else if (position == 0 && *srcSizePtr > 4 KB) {
        memcpy(ctx, ctx->dictCtx, sizeof(LZ4HC_CCtx_internal));
        LZ4HC_setExternalDict(ctx, (const BYTE *)src);
        ctx->compressionLevel = (short)cLevel;
        return LZ4HC_compress_generic_noDictCtx(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit);
    } else {
        return LZ4HC_compress_generic_internal(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit, usingDictCtxHc);
    }
}

static int
LZ4HC_compress_generic (
        LZ4HC_CCtx_internal* const ctx,
        const char* const src,
        char* const dst,
        int* const srcSizePtr,
        int const dstCapacity,
        int cLevel,
        limitedOutput_directive limit
        )
{
    if (ctx->dictCtx == NULL) {
        return LZ4HC_compress_generic_noDictCtx(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit);
    } else {
        return LZ4HC_compress_generic_dictCtx(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit);
    }
}
int LZ4_compress_HC(const char* src, char* dst, int srcSize, int dstCapacity, int compressionLevel)
{
    int cSize;
#if defined(LZ4HC_HEAPMODE) && LZ4HC_HEAPMODE==1
    LZ4_streamHC_t* const statePtr = (LZ4_streamHC_t*)ALLOC(sizeof(LZ4_streamHC_t));
    if (statePtr==NULL) return 0;
#else
    LZ4_streamHC_t state;
    LZ4_streamHC_t* const statePtr = &state;
#endif
    cSize = LZ4_compress_HC_extStateHC(statePtr, src, dst, srcSize, dstCapacity, compressionLevel);
#if defined(LZ4HC_HEAPMODE) && LZ4HC_HEAPMODE==1
    FREEMEM(statePtr);
#endif
    return cSize;
}

/* ... stream allocation ... */

int LZ4_freeStreamHC (LZ4_streamHC_t* LZ4_streamHCPtr)
{
    DEBUGLOG(4, "LZ4_freeStreamHC(%p)", LZ4_streamHCPtr);
    if (!LZ4_streamHCPtr) return 0;   /* support free on NULL */
    FREEMEM(LZ4_streamHCPtr);
    return 0;
}

LZ4_streamHC_t* LZ4_initStreamHC (void* buffer, size_t size)
{
    LZ4_streamHC_t* const LZ4_streamHCPtr = (LZ4_streamHC_t*)buffer;
    /* ... size and alignment checks ... */
    {   LZ4HC_CCtx_internal* const hcstate = &(LZ4_streamHCPtr->internal_donotuse);
        MEM_INIT(hcstate, 0, sizeof(*hcstate)); }
    LZ4_setCompressionLevel(LZ4_streamHCPtr, LZ4HC_CLEVEL_DEFAULT);
    return LZ4_streamHCPtr;
}
int LZ4_loadDictHC (LZ4_streamHC_t* LZ4_streamHCPtr,
                    const char* dictionary, int dictSize)
{
    LZ4HC_CCtx_internal* const ctxPtr = &LZ4_streamHCPtr->internal_donotuse;
    DEBUGLOG(4, "LZ4_loadDictHC(ctx:%p, dict:%p, dictSize:%d)", LZ4_streamHCPtr, dictionary, dictSize);
    assert(LZ4_streamHCPtr != NULL);
    if (dictSize > 64 KB) {
        dictionary += (size_t)dictSize - 64 KB;
        dictSize = 64 KB;
    }
    /* need a full initialization, there are bad side-effects when using resetFast() */
    {   int const cLevel = ctxPtr->compressionLevel;
        LZ4_initStreamHC(LZ4_streamHCPtr, sizeof(*LZ4_streamHCPtr));
        LZ4_setCompressionLevel(LZ4_streamHCPtr, cLevel);
    }
    LZ4HC_init_internal (ctxPtr, (const BYTE*)dictionary);
    ctxPtr->end = (const BYTE*)dictionary + dictSize;
    if (dictSize >= 4) LZ4HC_Insert (ctxPtr, ctxPtr->end-3);
    return dictSize;
}
static void LZ4HC_setExternalDict(LZ4HC_CCtx_internal* ctxPtr, const BYTE* newBlock)
{
    DEBUGLOG(4, "LZ4HC_setExternalDict(%p, %p)", ctxPtr, newBlock);
    if (ctxPtr->end >= ctxPtr->base + ctxPtr->dictLimit + 4)
        LZ4HC_Insert (ctxPtr, ctxPtr->end-3);   /* Referencing remaining dictionary content */

    /* Only one memory segment for extDict, so any previous extDict is lost at this stage */
    ctxPtr->lowLimit  = ctxPtr->dictLimit;
    ctxPtr->dictLimit = (U32)(ctxPtr->end - ctxPtr->base);
    ctxPtr->dictBase  = ctxPtr->base;
    ctxPtr->base = newBlock - ctxPtr->dictLimit;
    ctxPtr->end  = newBlock;
    ctxPtr->nextToUpdate = ctxPtr->dictLimit;   /* match referencing will resume from there */

    /* cannot reference an extDict and a dictCtx at the same time */
    ctxPtr->dictCtx = NULL;
}
static int
LZ4_compressHC_continue_generic (LZ4_streamHC_t* LZ4_streamHCPtr,
                                 const char* src, char* dst,
                                 int* srcSizePtr, int dstCapacity,
                                 limitedOutput_directive limit)
{
    LZ4HC_CCtx_internal* const ctxPtr = &LZ4_streamHCPtr->internal_donotuse;
    DEBUGLOG(5, "LZ4_compressHC_continue_generic(ctx=%p, src=%p, srcSize=%d, limit=%d)",
                LZ4_streamHCPtr, src, *srcSizePtr, limit);
    assert(ctxPtr != NULL);
    /* auto-init if forgotten */
    if (ctxPtr->base == NULL) LZ4HC_init_internal (ctxPtr, (const BYTE*) src);

    /* Check overflow */
    if ((size_t)(ctxPtr->end - ctxPtr->base) > 2 GB) {
        size_t dictSize = (size_t)(ctxPtr->end - ctxPtr->base) - ctxPtr->dictLimit;
        if (dictSize > 64 KB) dictSize = 64 KB;
        LZ4_loadDictHC(LZ4_streamHCPtr, (const char*)(ctxPtr->end) - dictSize, (int)dictSize);
    }

    /* Check if blocks follow each other */
    if ((const BYTE*)src != ctxPtr->end)
        LZ4HC_setExternalDict(ctxPtr, (const BYTE*)src);

    /* Check overlapping input/dictionary space */
    {   const BYTE* sourceEnd = (const BYTE*) src + *srcSizePtr;
        const BYTE* const dictBegin = ctxPtr->dictBase + ctxPtr->lowLimit;
        const BYTE* const dictEnd   = ctxPtr->dictBase + ctxPtr->dictLimit;
        if ((sourceEnd > dictBegin) && ((const BYTE*)src < dictEnd)) {
            if (sourceEnd > dictEnd) sourceEnd = dictEnd;
            ctxPtr->lowLimit = (U32)(sourceEnd - ctxPtr->dictBase);
            if (ctxPtr->dictLimit - ctxPtr->lowLimit < 4) ctxPtr->lowLimit = ctxPtr->dictLimit;
    }   }

    return LZ4HC_compress_generic (ctxPtr, src, dst, srcSizePtr, dstCapacity, ctxPtr->compressionLevel, limit);
}
int LZ4_saveDictHC (LZ4_streamHC_t* LZ4_streamHCPtr, char* safeBuffer, int dictSize)
{
    LZ4HC_CCtx_internal* const streamPtr = &LZ4_streamHCPtr->internal_donotuse;
    int const prefixSize = (int)(streamPtr->end - (streamPtr->base + streamPtr->dictLimit));
    DEBUGLOG(5, "LZ4_saveDictHC(%p, %p, %d)", LZ4_streamHCPtr, safeBuffer, dictSize);
    assert(prefixSize >= 0);
    if (dictSize > 64 KB) dictSize = 64 KB;
    if (dictSize < 4) dictSize = 0;
    if (dictSize > prefixSize) dictSize = prefixSize;
    if (safeBuffer == NULL) assert(dictSize == 0);
    if (dictSize > 0)
        memmove(safeBuffer, streamPtr->end - dictSize, dictSize);
    {   U32 const endIndex = (U32)(streamPtr->end - streamPtr->base);
        streamPtr->end = (const BYTE*)safeBuffer + dictSize;
        streamPtr->base = streamPtr->end - endIndex;
        streamPtr->dictLimit = endIndex - (U32)dictSize;
        streamPtr->lowLimit = endIndex - (U32)dictSize;
        if (streamPtr->nextToUpdate < streamPtr->dictLimit)
            streamPtr->nextToUpdate = streamPtr->dictLimit;
    }
    return dictSize;
}
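/* Illustration (not part of lz4hc.c): typical streaming usage of the
 * API above: compress dependent blocks, relocating the history window
 * with LZ4_saveDictHC so later blocks can still reference it. Uses only
 * public lz4hc.h symbols; blocks are assumed <= 64 KB and error
 * handling is trimmed for brevity:
 */
#if 0   /* illustrative sketch, not compiled with this file */
#include <stdio.h>
#include "lz4hc.h"
int demo_stream(const char* block1, int size1, const char* block2, int size2)
{
    char dst[LZ4_COMPRESSBOUND(65536)];
    char dictBuf[64 * 1024];
    int c1, c2, saved;
    LZ4_streamHC_t* const s = LZ4_createStreamHC();
    if (s == NULL) return -1;
    LZ4_setCompressionLevel(s, LZ4HC_CLEVEL_DEFAULT);

    c1 = LZ4_compress_HC_continue(s, block1, dst, size1, (int)sizeof(dst));
    /* move the window tail into dictBuf, so block1's buffer can be reused */
    saved = LZ4_saveDictHC(s, dictBuf, (int)sizeof(dictBuf));
    /* block2 may reference matches inside the saved history */
    c2 = LZ4_compress_HC_continue(s, block2, dst, size2, (int)sizeof(dst));

    printf("c1=%d, dict=%d, c2=%d\n", c1, saved, c2);
    LZ4_freeStreamHC(s);
    return 0;
}
#endif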
int LZ4_compressHC2_limitedOutput_withStateHC (void* state, const char* src, char* dst, int srcSize, int maxDstSize, int cLevel)
{ return LZ4_compress_HC_extStateHC(state, src, dst, srcSize, maxDstSize, cLevel); }

/* ... other deprecated wrappers ... */

int LZ4_resetStreamStateHC(void* state, char* inputBuffer)
{
    LZ4_streamHC_t* const hc4 = LZ4_initStreamHC(state, sizeof(*hc4));
    if (hc4 == NULL) return 1;   /* init failed */
    LZ4HC_init_internal (&hc4->internal_donotuse, (const BYTE*)inputBuffer);
    return 0;
}

int LZ4_freeHC (void* LZ4HC_Data)
{
    if (!LZ4HC_Data) return 0;   /* support free on NULL */
    FREEMEM(LZ4HC_Data);
    return 0;
}

char* LZ4_slideInputBufferHC(void* LZ4HC_Data)
{
    LZ4_streamHC_t* const ctx = (LZ4_streamHC_t*)LZ4HC_Data;
    const BYTE* bufferStart = ctx->internal_donotuse.base + ctx->internal_donotuse.lowLimit;
    LZ4_resetStreamHC_fast(ctx, ctx->internal_donotuse.compressionLevel);
    /* avoid const char * -> char * conversion warning :( */
    return (char*)(uptrval)bufferStart;
}
LZ4_FORCE_INLINE LZ4HC_match_t
LZ4HC_FindLongerMatch(LZ4HC_CCtx_internal* const ctx,
                      const BYTE* ip, const BYTE* const iHighLimit,
                      int minLen, int nbSearches,
                      const dictCtx_directive dict,
                      const HCfavor_e favorDecSpeed)
{
    LZ4HC_match_t match = { 0 , 0 };
    const BYTE* matchPtr = NULL;
    /* note : LZ4HC_InsertAndGetWiderMatch() is able to modify the starting position of a match (*startpos),
     * but this won't be the case here, as we define iLowLimit==ip,
     * so LZ4HC_InsertAndGetWiderMatch() won't be allowed to search past ip */
    int matchLength = LZ4HC_InsertAndGetWiderMatch(ctx, ip, ip, iHighLimit, minLen, &matchPtr, &ip, nbSearches, 1 /*patternAnalysis*/, 1 /*chainSwap*/, dict, favorDecSpeed);
    if (matchLength <= minLen) return match;
    if (favorDecSpeed) {
        if ((matchLength>18) & (matchLength<=36)) matchLength=18;   /* favor decompression shortcut */
    }
    match.len = matchLength;
    match.off = (int)(ip-matchPtr);
    return match;
}
static int LZ4HC_compress_optimal ( LZ4HC_CCtx_internal* ctx,
                                    const char* const source,
                                    char* dst,
                                    int* srcSizePtr,
                                    int dstCapacity,
                                    int const nbSearches,
                                    size_t sufficient_len,
                                    const limitedOutput_directive limit,
                                    int const fullUpdate,
                                    const dictCtx_directive dict,
                                    const HCfavor_e favorDecSpeed)
{
    int retval = 0;
#define TRAILING_LITERALS 3
#ifdef LZ4HC_HEAPMODE
    LZ4HC_optimal_t* const opt = (LZ4HC_optimal_t*)ALLOC(sizeof(LZ4HC_optimal_t) * (LZ4_OPT_NUM + TRAILING_LITERALS));
#else
    LZ4HC_optimal_t opt[LZ4_OPT_NUM + TRAILING_LITERALS];   /* ~64 KB, which is a bit large for stack... */
#endif

    const BYTE* ip = (const BYTE*) source;
    const BYTE* anchor = ip;
    const BYTE* const iend = ip + *srcSizePtr;
    const BYTE* const mflimit = iend - MFLIMIT;
    const BYTE* const matchlimit = iend - LASTLITERALS;
    BYTE* op = (BYTE*) dst;
    BYTE* opSaved = (BYTE*) dst;
    BYTE* oend = op + dstCapacity;
    int ovml = MINMATCH;   /* overflow - last sequence */
    const BYTE* ovref = NULL;

    /* init */
#ifdef LZ4HC_HEAPMODE
    if (opt == NULL) goto _return_label;
#endif
    DEBUGLOG(5, "LZ4HC_compress_optimal(dst=%p, dstCapa=%u)", dst, (unsigned)dstCapacity);
    *srcSizePtr = 0;
    if (limit == fillOutput) oend -= LASTLITERALS;   /* Hack for support of LZ4 format restriction */
    if (sufficient_len >= LZ4_OPT_NUM) sufficient_len = LZ4_OPT_NUM-1;
    /* Main Loop */
    while (ip <= mflimit) {
        int const llen = (int)(ip - anchor);
        int best_mlen, best_off;
        int cur, last_match_pos = 0;

        LZ4HC_match_t const firstMatch = LZ4HC_FindLongerMatch(ctx, ip, matchlimit, MINMATCH-1, nbSearches, dict, favorDecSpeed);
        if (firstMatch.len==0) { ip++; continue; }

        if ((size_t)firstMatch.len > sufficient_len) {
            /* good enough solution : immediate encoding */
            int const firstML = firstMatch.len;
            const BYTE* const matchPos = ip - firstMatch.off;
            opSaved = op;
            if ( LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor), firstML, matchPos, limit, oend) ) {   /* updates ip, op and anchor */
                ovml = firstML;
                ovref = matchPos;
                goto _dest_overflow;
            }
            continue;
        }

        /* set prices for first positions (literals) */
        {   int rPos;
            for (rPos = 0 ; rPos < MINMATCH ; rPos++) {
                int const cost = LZ4HC_literalsPrice(llen + rPos);
                opt[rPos].mlen = 1;
                opt[rPos].off = 0;
                opt[rPos].litlen = llen + rPos;
                opt[rPos].price = cost;
                DEBUGLOG(7, "rPos:%3i => price:%3i (litlen=%i) -- initial setup",
                            rPos, cost, opt[rPos].litlen);
        }   }
        /* set prices using initial match */
        {   int mlen = MINMATCH;
            int const matchML = firstMatch.len;   /* necessarily < sufficient_len < LZ4_OPT_NUM */
            int const offset = firstMatch.off;
            assert(matchML < LZ4_OPT_NUM);
            for ( ; mlen <= matchML ; mlen++) {
                int const cost = LZ4HC_sequencePrice(llen, mlen);
                opt[mlen].mlen = mlen;
                opt[mlen].off = offset;
                opt[mlen].litlen = llen;
                opt[mlen].price = cost;
                DEBUGLOG(7, "rPos:%3i => price:%3i (matchlen=%i) -- initial setup",
                            mlen, cost, mlen);
        }   }
        last_match_pos = firstMatch.len;
        {   int addLit;
            for (addLit = 1; addLit <= TRAILING_LITERALS; addLit ++) {
                opt[last_match_pos+addLit].mlen = 1;   /* literal */
                opt[last_match_pos+addLit].off = 0;
                opt[last_match_pos+addLit].litlen = addLit;
                opt[last_match_pos+addLit].price = opt[last_match_pos].price + LZ4HC_literalsPrice(addLit);
                DEBUGLOG(7, "rPos:%3i => price:%3i (litlen=%i) -- initial setup",
                            last_match_pos+addLit, opt[last_match_pos+addLit].price, addLit);
        }   }
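#if 0   /* illustrative sketch, not compiled */
/* Illustration (not part of lz4hc.c): opt[] is a dynamic-programming /
 * shortest-path table: opt[r] holds the cheapest known way to encode the
 * next r bytes, priced in output bytes via LZ4HC_literalsPrice() and
 * LZ4HC_sequencePrice(). The setup above seeds positions 0..MINMATCH-1
 * as pure literals and MINMATCH..firstMatch.len via the first match;
 * the loop below then relaxes each reachable position, much like
 * Dijkstra on a line. A toy version of one relaxation step, with
 * hypothetical names (literal runs handled separately in the real code):
 */
typedef struct { int price, mlen, off, litlen; } demo_opt_t;
static void demo_relax(demo_opt_t* opt, int cur, int ml, int off, int price)
{
    /* keep the cheaper of: the existing path to cur+ml, or reaching it
     * via a match of length ml starting at cur */
    if (price < opt[cur + ml].price) {
        opt[cur + ml].price  = price;
        opt[cur + ml].mlen   = ml;
        opt[cur + ml].off    = off;
        opt[cur + ml].litlen = 0;
    }
}
#endif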
        /* check further positions */
        for (cur = 1; cur < last_match_pos; cur++) {
            const BYTE* const curPtr = ip + cur;
            LZ4HC_match_t newMatch;

            if (curPtr > mflimit) break;
            DEBUGLOG(7, "rPos:%u[%u] vs [%u]%u",
                    cur, opt[cur].price, opt[cur+1].price, cur+1);
            if (fullUpdate) {
                /* not useful to search here if next position has same (or lower) cost */
                if ( (opt[cur+1].price <= opt[cur].price)
                  /* in some cases, next position has same cost, but cost rises sharply afterwards,
                   * so it's necessary to update (#1029) */
                  && (opt[cur+MINMATCH].price < opt[cur].price + 3 /*min seq price*/) )
                    continue;
            } else {
                /* not useful to search here if next position has same (or lower) cost */
                if (opt[cur+1].price <= opt[cur].price) continue;
            }

            DEBUGLOG(7, "search at rPos:%u", cur);
            if (fullUpdate)
                newMatch = LZ4HC_FindLongerMatch(ctx, curPtr, matchlimit, MINMATCH-1, nbSearches, dict, favorDecSpeed);
            else
                /* only test matches of minimum length; slightly faster, but misses a few bytes */
                newMatch = LZ4HC_FindLongerMatch(ctx, curPtr, matchlimit, last_match_pos - cur, nbSearches, dict, favorDecSpeed);
            if (!newMatch.len) continue;

            if ( ((size_t)newMatch.len > sufficient_len)
              || (newMatch.len + cur >= LZ4_OPT_NUM) ) {
                /* immediate encoding */
                best_mlen = newMatch.len;
                best_off = newMatch.off;
                last_match_pos = cur + 1;
                goto encode;
            }

            /* before match : set price with literals at beginning */
            {   int const baseLitlen = opt[cur].litlen;
                int litlen;
                for (litlen = 1; litlen < MINMATCH; litlen++) {
                    int const price = opt[cur].price - LZ4HC_literalsPrice(baseLitlen) + LZ4HC_literalsPrice(baseLitlen+litlen);
                    int const pos = cur + litlen;
                    if (price < opt[pos].price) {
                        opt[pos].mlen = 1;   /* literal */
                        opt[pos].off = 0;
                        opt[pos].litlen = baseLitlen+litlen;
                        opt[pos].price = price;
                        DEBUGLOG(7, "rPos:%3i => price:%3i (litlen=%i)",
                                    pos, price, opt[pos].litlen);
            }   }   }

            /* set prices using match at position = cur */
            {   int const matchML = newMatch.len;
                int ml = MINMATCH;

                assert(cur + newMatch.len < LZ4_OPT_NUM);
                for ( ; ml <= matchML ; ml++) {
                    int const pos = cur + ml;
                    int const offset = newMatch.off;
                    int price;
                    int ll;
                    DEBUGLOG(7, "testing price rPos %i (last_match_pos=%i)",
                                pos, last_match_pos);
                    if (opt[cur].mlen == 1) {
                        ll = opt[cur].litlen;
                        price = ((cur > ll) ? opt[cur - ll].price : 0)
                              + LZ4HC_sequencePrice(ll, ml);
                    } else {
                        ll = 0;
                        price = opt[cur].price + LZ4HC_sequencePrice(0, ml);
                    }

                    assert((U32)favorDecSpeed <= 1);
                    if (pos > last_match_pos+TRAILING_LITERALS
                     || price <= opt[pos].price - (int)favorDecSpeed) {
                        DEBUGLOG(7, "rPos:%3i => price:%3i (matchlen=%i)",
                                    pos, price, ml);
                        assert(pos < LZ4_OPT_NUM);
                        if ( (ml == matchML)   /* last pos of last match */
                          && (last_match_pos < pos) )
                            last_match_pos = pos;
                        opt[pos].mlen = ml;
                        opt[pos].off = offset;
                        opt[pos].litlen = ll;
                        opt[pos].price = price;
            }   }   }
            /* complete following positions with literals */
            {   int addLit;
                for (addLit = 1; addLit <= TRAILING_LITERALS; addLit ++) {
                    opt[last_match_pos+addLit].mlen = 1;   /* literal */
                    opt[last_match_pos+addLit].off = 0;
                    opt[last_match_pos+addLit].litlen = addLit;
                    opt[last_match_pos+addLit].price = opt[last_match_pos].price + LZ4HC_literalsPrice(addLit);
                    DEBUGLOG(7, "rPos:%3i => price:%3i (litlen=%i)", last_match_pos+addLit, opt[last_match_pos+addLit].price, addLit);
            }   }
        }   /* for (cur = 1; cur < last_match_pos; cur++) */
        assert(last_match_pos < LZ4_OPT_NUM + TRAILING_LITERALS);
        best_mlen = opt[last_match_pos].mlen;
        best_off = opt[last_match_pos].off;
        cur = last_match_pos - best_mlen;

encode: /* cur, last_match_pos, best_mlen, best_off must be set */
        assert(cur < LZ4_OPT_NUM);
        assert(last_match_pos >= 1);   /* == 1 when only one candidate */
        DEBUGLOG(6, "reverse traversal, looking for shortest path (last_match_pos=%i)", last_match_pos);
        {   int candidate_pos = cur;
            int selected_matchLength = best_mlen;
            int selected_offset = best_off;
            while (1) {   /* from end to beginning */
                int const next_matchLength = opt[candidate_pos].mlen;   /* can be 1, meaning literal */
                int const next_offset = opt[candidate_pos].off;
                DEBUGLOG(7, "pos %i: sequence length %i", candidate_pos, selected_matchLength);
                opt[candidate_pos].mlen = selected_matchLength;
                opt[candidate_pos].off = selected_offset;
                selected_matchLength = next_matchLength;
                selected_offset = next_offset;
                if (next_matchLength > candidate_pos) break;   /* last match elected, first match to encode */
                assert(next_matchLength > 0);   /* can be 1, meaning literal */
                candidate_pos -= next_matchLength;
        }   }

        /* encode all recorded sequences in order */
        {   int rPos = 0;   /* relative position (to ip) */
            while (rPos < last_match_pos) {
                int const ml = opt[rPos].mlen;
                int const offset = opt[rPos].off;
                if (ml == 1) { ip++; rPos++; continue; }   /* literal; note: can end up with several literals, in which case, skip them */
                rPos += ml;
                assert(ml >= MINMATCH);
                assert((offset >= 1) && (offset <= LZ4_DISTANCE_MAX));
                opSaved = op;
                if ( LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor), ml, ip - offset, limit, oend) ) {   /* updates ip, op and anchor */
                    ovml = ml;
                    ovref = ip - offset;
                    goto _dest_overflow;
        }   }   }
    }   /* while (ip <= mflimit) */
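/* Illustration (not part of lz4hc.c): the reverse traversal above turns
 * "cheapest way to reach position r" back-links into a forward chain.
 * Walking backward from the final position, each visited opt[pos] is
 * overwritten with the sequence selected on the optimal path, so the
 * forward pass can then emit sequences in order. A toy trace with
 * hypothetical lengths (mlen 1 = literal):
 *
 *   best path to position 12: literal, literal, 4-byte match, 6-byte match
 *   backtrack visits positions 6 -> 2 -> 1 -> 0, rewriting each entry;
 *   forward pass then emits: 2 literals, match(4), match(6).
 */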
_last_literals:
    /* Encode Last Literals */
    {   size_t lastRunSize = (size_t)(iend - anchor);   /* literals */
        size_t llAdd = (lastRunSize + 255 - RUN_MASK) / 255;
        size_t const totalSize = 1 + llAdd + lastRunSize;
        if (limit == fillOutput) oend += LASTLITERALS;   /* restore correct value */
        if (limit && (op + totalSize > oend)) {
            if (limit == limitedOutput) {   /* Check output limit */
                retval = 0;
                goto _return_label;
            }
            /* adapt lastRunSize to fill 'dst' */
            lastRunSize  = (size_t)(oend - op) - 1 /*token*/;
            llAdd = (lastRunSize + 256 - RUN_MASK) / 256;
            lastRunSize -= llAdd;
        }
        DEBUGLOG(6, "Final literal run : %i literals", (int)lastRunSize);
        ip = anchor + lastRunSize;   /* can be != iend if limit==fillOutput */

        if (lastRunSize >= RUN_MASK) {
            size_t accumulator = lastRunSize - RUN_MASK;
            *op++ = (RUN_MASK << ML_BITS);
            for(; accumulator >= 255 ; accumulator -= 255) *op++ = 255;
            *op++ = (BYTE) accumulator;
        } else {
            *op++ = (BYTE)(lastRunSize << ML_BITS);
        }
        memcpy(op, anchor, lastRunSize);
        op += lastRunSize;
    }

    /* End */
    *srcSizePtr = (int) (((const char*)ip) - source);
    retval = (int) ((char*)op - dst);
    goto _return_label;
_dest_overflow:
    if (limit == fillOutput) {
        /* Assumption : ip, anchor, ovml and ovref must be set correctly */
        size_t const ll = (size_t)(ip - anchor);
        size_t const ll_addbytes = (ll + 240) / 255;
        size_t const ll_totalCost = 1 + ll_addbytes + ll;
        BYTE* const maxLitPos = oend - 3;   /* 2 for offset, 1 for token */
        DEBUGLOG(6, "Last sequence overflowing (only %i bytes remaining)", (int)(oend-1-opSaved));
        op = opSaved;   /* restore correct out pointer */
        if (op + ll_totalCost <= maxLitPos) {
            /* ll validated; now adjust match length to fit */
            size_t const bytesLeftForMl = (size_t)(maxLitPos - (op+ll_totalCost));
            size_t const maxMlSize = MINMATCH + (ML_MASK-1) + (bytesLeftForMl * 255);
            assert(maxMlSize < INT_MAX); assert(ovml >= 0);
            if ((size_t)ovml > maxMlSize) ovml = (int)maxMlSize;
            if ((oend + LASTLITERALS) - (op + ll_totalCost + 2) - 1 + ovml >= MFLIMIT) {
                DEBUGLOG(6, "Space to end : %i + ml (%i)", (int)((oend + LASTLITERALS) - (op + ll_totalCost + 2) - 1), ovml);
                LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor), ovml, ovref, notLimited, oend);
        }   }
        goto _last_literals;
    }
_return_label:
#ifdef LZ4HC_HEAPMODE
    FREEMEM(opt);
#endif
    return retval;
}
static const int LZ4_minLength
LZ4_FORCE_INLINE void LZ4_wildCopy8(void *dstPtr, const void *srcPtr, void *dstEnd)
#define LZ4_STATIC_ASSERT(c)
int LZ4_compressBound(int isize)
static void LZ4_writeLE16(void *memPtr, U16 value)
static U32 LZ4_read32(const void *memPtr)
LZ4_FORCE_INLINE unsigned LZ4_count(const BYTE *pIn, const BYTE *pMatch, const BYTE *pInLimit)
static unsigned LZ4_NbCommonBytes(reg_t val)
static reg_t LZ4_read_ARCH(const void *memPtr)
static unsigned LZ4_isLittleEndian(void)
#define ALLOC_AND_ZERO(s)
#define MEM_INIT(p, v, s)
static int LZ4_isAligned(const void *ptr, size_t alignment)
static U16 LZ4_read16(const void *memPtr)
#define LZ4_MAX_INPUT_SIZE
#define UPDATABLE(ip, op, anchor)
int LZ4_compressHC2_withStateHC(void *state, const char *src, char *dst, int srcSize, int cLevel)
void LZ4_favorDecompressionSpeed(LZ4_streamHC_t *LZ4_streamHCPtr, int favor)
static size_t LZ4_streamHC_t_alignment(void)
#define LZ4HC_rotl32(x, r)
int LZ4_compress_HC_extStateHC(void *state, const char *src, char *dst, int srcSize, int dstCapacity, int compressionLevel)
void LZ4_attach_HC_dictionary(LZ4_streamHC_t *working_stream, const LZ4_streamHC_t *dictionary_stream)
static int LZ4_compressHC_continue_generic(LZ4_streamHC_t *LZ4_streamHCPtr, const char *src, char *dst, int *srcSizePtr, int dstCapacity, limitedOutput_directive limit)
static int LZ4HC_protectDictEnd(U32 const dictLimit, U32 const matchIndex)
#define DELTANEXTU16(table, pos)
void LZ4_resetStreamHC_fast(LZ4_streamHC_t *LZ4_streamHCPtr, int compressionLevel)
int LZ4_compressHC_limitedOutput_continue(LZ4_streamHC_t *ctx, const char *src, char *dst, int srcSize, int maxDstSize)
int LZ4_resetStreamStateHC(void *state, char *inputBuffer)
static unsigned LZ4HC_reverseCountPattern(const BYTE *ip, const BYTE *const iLow, U32 pattern)
LZ4_FORCE_INLINE int LZ4HC_countBack(const BYTE *const ip, const BYTE *const match, const BYTE *const iMin, const BYTE *const mMin)
static void LZ4HC_setExternalDict(LZ4HC_CCtx_internal *ctxPtr, const BYTE *newBlock)
int LZ4_saveDictHC(LZ4_streamHC_t *LZ4_streamHCPtr, char *safeBuffer, int dictSize)
#define TRAILING_LITERALS
LZ4_FORCE_INLINE int LZ4HC_literalsPrice(int const litlen)
LZ4_FORCE_INLINE int LZ4HC_encodeSequence(const BYTE **_ip, BYTE **_op, const BYTE **_anchor, int matchLength, const BYTE *const match, limitedOutput_directive limit, BYTE *oend)
LZ4_streamHC_t * LZ4_initStreamHC(void *buffer, size_t size)
int LZ4_freeHC(void *LZ4HC_Data)
static unsigned LZ4HC_countPattern(const BYTE *ip, const BYTE *const iEnd, U32 const pattern32)
LZ4_FORCE_INLINE LZ4HC_match_t LZ4HC_FindLongerMatch(LZ4HC_CCtx_internal *const ctx, const BYTE *ip, const BYTE *const iHighLimit, int minLen, int nbSearches, const dictCtx_directive dict, const HCfavor_e favorDecSpeed)
int LZ4_sizeofStateHC(void)
static U32 LZ4HC_hashPtr(const void *ptr)
static U32 LZ4HC_rotatePattern(size_t const rotate, U32 const pattern)
int LZ4_compress_HC_continue_destSize(LZ4_streamHC_t *LZ4_streamHCPtr, const char *src, char *dst, int *srcSizePtr, int targetDestSize)
int LZ4_compressHC2_limitedOutput_continue(void *LZ4HC_Data, const char *src, char *dst, int srcSize, int dstCapacity, int cLevel)
int LZ4_compressHC2_continue(void *LZ4HC_Data, const char *src, char *dst, int srcSize, int cLevel)
void * LZ4_createHC(const char *inputBuffer)
static int LZ4HC_compress_generic_noDictCtx(LZ4HC_CCtx_internal *const ctx, const char *const src, char *const dst, int *const srcSizePtr, int const dstCapacity, int cLevel, limitedOutput_directive limit)
int LZ4_sizeofStreamStateHC(void)
LZ4_FORCE_INLINE int LZ4HC_sequencePrice(int litlen, int mlen)
int LZ4_compress_HC_destSize(void *state, const char *source, char *dest, int *sourceSizePtr, int targetDestSize, int cLevel)
int LZ4_freeStreamHC(LZ4_streamHC_t *LZ4_streamHCPtr)
int LZ4_compressHC2(const char *src, char *dst, int srcSize, int cLevel)
int LZ4_compressHC_limitedOutput(const char *src, char *dst, int srcSize, int maxDstSize)
int LZ4_compressHC_withStateHC(void *state, const char *src, char *dst, int srcSize)
static int LZ4HC_compress_generic_dictCtx(LZ4HC_CCtx_internal *const ctx, const char *const src, char *const dst, int *const srcSizePtr, int const dstCapacity, int cLevel, limitedOutput_directive limit)
LZ4_FORCE_INLINE void LZ4HC_Insert(LZ4HC_CCtx_internal *hc4, const BYTE *ip)
static void LZ4HC_init_internal(LZ4HC_CCtx_internal *hc4, const BYTE *start)
LZ4_FORCE_INLINE int LZ4HC_compress_generic_internal(LZ4HC_CCtx_internal *const ctx, const char *const src, char *const dst, int *const srcSizePtr, int const dstCapacity, int cLevel, const limitedOutput_directive limit, const dictCtx_directive dict)
int LZ4_compressHC_limitedOutput_withStateHC(void *state, const char *src, char *dst, int srcSize, int maxDstSize)
static int LZ4HC_compress_generic(LZ4HC_CCtx_internal *const ctx, const char *const src, char *const dst, int *const srcSizePtr, int const dstCapacity, int cLevel, limitedOutput_directive limit)
LZ4_FORCE_INLINE int LZ4HC_InsertAndGetWiderMatch(LZ4HC_CCtx_internal *hc4, const BYTE *const ip, const BYTE *const iLowLimit, const BYTE *const iHighLimit, int longest, const BYTE **matchpos, const BYTE **startpos, const int maxNbAttempts, const int patternAnalysis, const int chainSwap, const dictCtx_directive dict, const HCfavor_e favorDecSpeed)
char * LZ4_slideInputBufferHC(void *LZ4HC_Data)
LZ4_FORCE_INLINE int LZ4HC_compress_hashChain(LZ4HC_CCtx_internal *const ctx, const char *const source, char *const dest, int *srcSizePtr, int const maxOutputSize, int maxNbAttempts, const limitedOutput_directive limit, const dictCtx_directive dict)
int LZ4_compress_HC_continue(LZ4_streamHC_t *LZ4_streamHCPtr, const char *src, char *dst, int srcSize, int dstCapacity)
int LZ4_compress_HC(const char *src, char *dst, int srcSize, int dstCapacity, int compressionLevel)
LZ4_streamHC_t * LZ4_createStreamHC(void)
static int LZ4HC_compress_optimal(LZ4HC_CCtx_internal *ctx, const char *const source, char *dst, int *srcSizePtr, int dstCapacity, int const nbSearches, size_t sufficient_len, const limitedOutput_directive limit, int const fullUpdate, const dictCtx_directive dict, const HCfavor_e favorDecSpeed)
int LZ4_compressHC2_limitedOutput_withStateHC(void *state, const char *src, char *dst, int srcSize, int maxDstSize, int cLevel)
int LZ4_compress_HC_extStateHC_fastReset(void *state, const char *src, char *dst, int srcSize, int dstCapacity, int compressionLevel)
void LZ4_setCompressionLevel(LZ4_streamHC_t *LZ4_streamHCPtr, int compressionLevel)
LZ4_FORCE_INLINE int LZ4HC_InsertAndFindBestMatch(LZ4HC_CCtx_internal *const hc4, const BYTE *const ip, const BYTE *const iLimit, const BYTE **matchpos, const int maxNbAttempts, const int patternAnalysis, const dictCtx_directive dict)
int LZ4_loadDictHC(LZ4_streamHC_t *LZ4_streamHCPtr, const char *dictionary, int dictSize)
int LZ4_compressHC_continue(LZ4_streamHC_t *ctx, const char *src, char *dst, int srcSize)
void LZ4_resetStreamHC(LZ4_streamHC_t *LZ4_streamHCPtr, int compressionLevel)
int LZ4_compressHC2_limitedOutput(const char *src, char *dst, int srcSize, int maxDstSize, int cLevel)
int LZ4_compressHC(const char *src, char *dst, int srcSize)
static void LZ4HC_clearTables(LZ4HC_CCtx_internal *hc4)
#define LZ4HC_CLEVEL_DEFAULT
union LZ4_streamHC_u LZ4_streamHC_t
const LZ4_byte * dictBase
const LZ4HC_CCtx_internal * dictCtx
LZ4_u32 hashTable[LZ4HC_HASHTABLESIZE]
LZ4_u16 chainTable[LZ4HC_MAXD]
LZ4HC_CCtx_internal internal_donotuse