/* Fragmentary excerpts from zstd's long-distance matcher (zstd_ldm.c).
 * Elided regions are marked by ellipsis comments. */

#define LDM_BUCKET_SIZE_LOG 3
#define LDM_MIN_MATCH_LENGTH 64
#define LDM_HASH_RLOG 7
#define LDM_HASH_CHAR_OFFSET 10
void ZSTD_ldm_adjustParameters(ldmParams_t* params,
                               ZSTD_compressionParameters const* cParams)
{
    params->windowLog = cParams->windowLog;
    /* ... */
    DEBUGLOG(4, "ZSTD_ldm_adjustParameters");
    /* ... */
    {   U32 const minMatch = MAX(cParams->targetLength, params->minMatchLength);
        assert(minMatch >= ZSTD_LDM_MINMATCH_MIN);
        assert(minMatch <= ZSTD_LDM_MINMATCH_MAX);
        params->minMatchLength = minMatch;
    }
    if (params->hashLog == 0) {
        /* ... */
    }
    if (params->hashRateLog == 0) {
        /* ... */
    }
    /* ... */
}
size_t ZSTD_ldm_getTableSize(ldmParams_t params)
{
    /* ... */
    size_t const ldmBucketSizeLog = MIN(params.bucketSizeLog, params.hashLog);
    size_t const ldmBucketSize = ((size_t)1) << (params.hashLog - ldmBucketSizeLog);
    /* ... */
    return params.enableLdm ? totalSize : 0;
}

size_t ZSTD_ldm_getMaxNbSeq(ldmParams_t params, size_t maxChunkSize)
{
    return params.enableLdm ? (maxChunkSize / params.minMatchLength) : 0;
}
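/*
 * Illustration (not part of zstd_ldm.c): rough sizing implied by the two
 * helpers above, using hypothetical parameters hashLog = 20 and
 * bucketSizeLog = 3, and assuming an 8-byte hash-table entry (offset +
 * checksum). A standalone sanity check, compiled separately:
 */
#if 0
#include <assert.h>
#include <stddef.h>

int main(void)
{
    unsigned const hashLog = 20, bucketSizeLog = 3;     /* hypothetical parameters */
    size_t const entrySize = 8;                         /* assumed sizeof(ldmEntry_t) */
    size_t const nbEntries = (size_t)1 << hashLog;
    size_t const nbBucketOffsets = (size_t)1 << (hashLog - bucketSizeLog);
    size_t const maxChunkSize = (size_t)1 << 20;        /* 1 MiB chunk */
    size_t const minMatchLength = 64;

    assert(nbEntries * entrySize == (size_t)8 << 20);   /* ~8 MiB of hash entries   */
    assert(nbBucketOffsets == (size_t)128 << 10);       /* 128 KiB of bucket offsets */
    assert(maxChunkSize / minMatchLength == 16384);     /* sequence cap per 1 MiB chunk */
    return 0;
}
#endif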
static U32 ZSTD_ldm_getSmallHash(U64 value, U32 numBits)
{
    return numBits == 0 ? 0 : (U32)(value >> (64 - numBits));
}

static U32 ZSTD_ldm_getChecksum(U64 hash, U32 numBitsToDiscard)
{
    assert(numBitsToDiscard <= 32);
    return (hash >> (64 - 32 - numBitsToDiscard)) & 0xFFFFFFFF;
}

static U32 ZSTD_ldm_getTag(U64 hash, U32 hbits, U32 numTagBits)
{
    assert(numTagBits < 32 && hbits <= 32);
    if (32 - hbits < numTagBits) {
        return hash & (((U32)1 << numTagBits) - 1);
    }
    return (hash >> (32 - hbits - numTagBits)) & (((U32)1 << numTagBits) - 1);
}
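/*
 * Illustration (not part of zstd_ldm.c): how the three helpers above carve up
 * a 64-bit rolling hash. With hypothetical values hBits = 20 (bucket index
 * width, typically hashLog - bucketSizeLog) and numTagBits = 4 (typically
 * hashRateLog, which gates insertion later in this file), the top hBits pick
 * the bucket, the next 32 bits are kept as a checksum, and numTagBits more
 * bits form the tag. A minimal standalone sketch, compiled separately:
 */
#if 0
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint64_t const hash = 0x123456789ABCDEF0ULL;   /* sample rolling hash */
    unsigned const hBits = 20, numTagBits = 4;     /* hypothetical parameters */

    uint32_t const smallHash = (uint32_t)(hash >> (64 - hBits));
    uint32_t const checksum  = (uint32_t)(hash >> (64 - 32 - hBits));
    uint32_t const tag       = (uint32_t)(hash >> (32 - hBits - numTagBits))
                             & ((1u << numTagBits) - 1);

    /* prints smallHash=0x12345 checksum=0x6789abcd tag=0xe */
    printf("smallHash=%#x checksum=%#x tag=%#x\n",
           (unsigned)smallHash, (unsigned)checksum, (unsigned)tag);
    return 0;
}
#endif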
static void ZSTD_ldm_insertEntry(ldmState_t* ldmState,
                                 size_t const hash, const ldmEntry_t entry,
                                 ldmParams_t const ldmParams)
{
    /* ... */
    bucketOffsets[hash]++;
    /* ... */
}

static void ZSTD_ldm_makeEntryAndInsertByTag(ldmState_t* ldmState,
                                             U64 const rollingHash,
                                             U32 const hBits,
                                             U32 const offset,
                                             ldmParams_t const ldmParams)
{
    /* ... */
    if (tag == tagMask) {
        /* ... */
    }
}
static size_t ZSTD_ldm_countBackwardsMatch(
            const BYTE* pIn, const BYTE* pAnchor,
            const BYTE* pMatch, const BYTE* pBase)
{
    size_t matchLength = 0;
    while (pIn > pAnchor && pMatch > pBase && pIn[-1] == pMatch[-1]) {
        pIn--;
        pMatch--;
        matchLength++;
    }
    return matchLength;
}
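/*
 * Illustration (not part of zstd_ldm.c): the backwards scan above extends a
 * match to the left of the current position, stopping at the anchor (end of
 * the last emitted sequence) or the window base. A standalone sketch with a
 * made-up buffer, compiled separately:
 */
#if 0
#include <stdio.h>

static size_t count_backwards(const unsigned char* pIn, const unsigned char* pAnchor,
                              const unsigned char* pMatch, const unsigned char* pBase)
{
    size_t matchLength = 0;
    while (pIn > pAnchor && pMatch > pBase && pIn[-1] == pMatch[-1]) {
        pIn--; pMatch--; matchLength++;
    }
    return matchLength;
}

int main(void)
{
    const unsigned char buf[] = "abcXYZ....abcXYZ";
    /* forward match found at buf+13 ("XYZ") against buf+3; scanning backwards
     * from there also picks up the shared "abc" prefix. */
    size_t const back = count_backwards(buf + 13, buf, buf + 3, buf);
    printf("backward extension = %zu\n", back);   /* prints 3 */
    return 0;
}
#endif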
static U64 ZSTD_ldm_fillLdmHashTable(ldmState_t* state,
                                     U64 lastHash, const BYTE* lastHashed,
                                     const BYTE* iend, const BYTE* base,
                                     U32 hBits, ldmParams_t const ldmParams)
{
    U64 rollingHash = lastHash;
    const BYTE* cur = lastHashed + 1;
    /* ... */
}

void ZSTD_ldm_fillHashTable(
            ldmState_t* state, const BYTE* ip,
            const BYTE* iend, ldmParams_t const* params)
{
    DEBUGLOG(5, "ZSTD_ldm_fillHashTable");
    if ((size_t)(iend - ip) >= params->minMatchLength) {
        /* ... */
    }
}
static size_t ZSTD_ldm_generateSequences_internal(
        ldmState_t* ldmState, rawSeqStore_t* rawSeqStore,
        ldmParams_t const* params, void const* src, size_t srcSize)
{
    /* LDM parameters */
    U32 const minMatchLength = params->minMatchLength;
    /* ... */
    U32 const ldmBucketSize = 1U << params->bucketSizeLog;
    U32 const hashRateLog = params->hashRateLog;
    U32 const ldmTagMask = (1U << params->hashRateLog) - 1;
    /* Prefix and extDict parameters */
    /* ... */
    BYTE const* const dictStart = extDict ? dictBase + lowestIndex : NULL;
    BYTE const* const dictEnd = extDict ? dictBase + dictLimit : NULL;
    BYTE const* const lowPrefixPtr = base + dictLimit;
    /* Input bounds */
    /* ... */
    BYTE const* const iend = istart + srcSize;
    /* Input positions */
    BYTE const* anchor = istart;
    /* ... */

    while (ip <= ilimit) {
        /* ... */
        size_t forwardMatchLength = 0, backwardMatchLength = 0;
        /* ... */
        /* Rotate the rolling hash by one byte: drop lastHashed[0], add
         * lastHashed[minMatchLength]. */
        rollingHash = ZSTD_rollingHash_rotate(rollingHash, lastHashed[0],
                                              lastHashed[minMatchLength],
                                              ldmState->hashPower);
        /* ... */

        /* Get the best entry and compute the match lengths */
        {
            /* ... */
            size_t bestMatchLength = 0;
            /* ... */
            for (cur = bucket; cur < bucket + ldmBucketSize; ++cur) {
                size_t curForwardMatchLength, curBackwardMatchLength,
                       curTotalMatchLength;
                /* ... */
                if (extDict) {
                    BYTE const* const curMatchBase =
                        cur->offset < dictLimit ? dictBase : base;
                    BYTE const* const pMatch = curMatchBase + cur->offset;
                    BYTE const* const matchEnd =
                        cur->offset < dictLimit ? dictEnd : iend;
                    BYTE const* const lowMatchPtr =
                        cur->offset < dictLimit ? dictStart : lowPrefixPtr;

                    curForwardMatchLength = ZSTD_count_2segments(
                                                ip, pMatch, iend,
                                                matchEnd, lowPrefixPtr);
                    if (curForwardMatchLength < minMatchLength) {
                        continue;
                    }
                    curBackwardMatchLength =
                        ZSTD_ldm_countBackwardsMatch(ip, anchor, pMatch,
                                                     lowMatchPtr);
                    curTotalMatchLength = curForwardMatchLength +
                                          curBackwardMatchLength;
                } else { /* !extDict */
                    BYTE const* const pMatch = base + cur->offset;
                    curForwardMatchLength = ZSTD_count(ip, pMatch, iend);
                    if (curForwardMatchLength < minMatchLength) {
                        continue;
                    }
                    curBackwardMatchLength =
                        ZSTD_ldm_countBackwardsMatch(ip, anchor, pMatch,
                                                     lowPrefixPtr);
                    curTotalMatchLength = curForwardMatchLength +
                                          curBackwardMatchLength;
                }

                if (curTotalMatchLength > bestMatchLength) {
                    bestMatchLength = curTotalMatchLength;
                    forwardMatchLength = curForwardMatchLength;
                    backwardMatchLength = curBackwardMatchLength;
                    bestEntry = cur;
                }
            }
        }
        /* No match found: insert the current hash and keep scanning */
        if (bestEntry == NULL) {
            /* ... */
        }
        /* Match found */
        mLength = forwardMatchLength + backwardMatchLength;
        ip -= backwardMatchLength;

        {   /* Store the sequence */
            U32 const matchIndex = bestEntry->offset;
            /* ... */
            if (rawSeqStore->size == rawSeqStore->capacity)
                return ERROR(dstSize_tooSmall);
            /* ... */
        }
        /* ... */
        assert(ip + backwardMatchLength == lastHashed);
        /* Fill the hash table from lastHashed+1 to ip+mLength */
        if (ip + mLength <= ilimit) {
            rollingHash = ZSTD_ldm_fillLdmHashTable(
                              ldmState, rollingHash, lastHashed,
                              ip + mLength - 1, base, hBits, *params);
            lastHashed = ip + mLength - 1;
        }
        /* ... */
    }
    return iend - anchor;
}
static void ZSTD_ldm_reduceTable(ldmEntry_t* const table, U32 const size,
                                 U32 const reducerValue)
{   /* ... for each entry ... */
    if (table[u].offset < reducerValue) table[u].offset = 0;
    else table[u].offset -= reducerValue;
}
size_t ZSTD_ldm_generateSequences(
        ldmState_t* ldmState, rawSeqStore_t* sequences,
        ldmParams_t const* params, void const* src, size_t srcSize)
{
    U32 const maxDist = 1U << params->windowLog;
    /* ... */
    BYTE const* const iend = istart + srcSize;
    size_t const kMaxChunkSize = 1 << 20;
    size_t const nbChunks = (srcSize / kMaxChunkSize) + ((srcSize % kMaxChunkSize) != 0);
    /* ... */
    size_t leftoverSize = 0;
    /* ... */

    /* Generate the sequences one chunk at a time. */
    for (chunk = 0; chunk < nbChunks && sequences->size < sequences->capacity; ++chunk) {
        BYTE const* const chunkStart = istart + chunk * kMaxChunkSize;
        size_t const remaining = (size_t)(iend - chunkStart);
        BYTE const* const chunkEnd =
            (remaining < kMaxChunkSize) ? iend : chunkStart + kMaxChunkSize;
        size_t const chunkSize = chunkEnd - chunkStart;
        size_t newLeftoverSize;
        size_t const prevSize = sequences->size;
        assert(chunkStart < iend);

        /* Perform overflow correction if necessary. */
        if (ZSTD_window_needOverflowCorrection(ldmState->window, chunkEnd)) {
            U32 const ldmHSize = 1U << params->hashLog;
            U32 const correction = ZSTD_window_correctOverflow(
                &ldmState->window, 0, maxDist, chunkStart);
            ZSTD_ldm_reduceTable(ldmState->hashTable, ldmHSize, correction);
        }
        /* ... */
        newLeftoverSize = ZSTD_ldm_generateSequences_internal(
            ldmState, sequences, params, chunkStart, chunkSize);
        if (ZSTD_isError(newLeftoverSize))
            return newLeftoverSize;
        /* Prepend the leftover literals from the previous chunk to the first
         * newly generated sequence, if any. */
        if (prevSize < sequences->size) {
            sequences->seq[prevSize].litLength += (U32)leftoverSize;
            leftoverSize = newLeftoverSize;
        } else {
            assert(newLeftoverSize == chunkSize);
            leftoverSize += chunkSize;
        }
    }
    /* ... */
}
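/*
 * Illustration (not part of zstd_ldm.c): the chunk count above is a ceiling
 * division by kMaxChunkSize (1 MiB), so exactly 1 MiB of input is one chunk
 * and one extra byte spills into a second. A standalone check, compiled
 * separately:
 */
#if 0
#include <assert.h>
#include <stddef.h>

int main(void)
{
    size_t const kMaxChunkSize = (size_t)1 << 20;
    size_t srcSize;

    srcSize = kMaxChunkSize;            /* exactly one full chunk */
    assert(((srcSize / kMaxChunkSize) + ((srcSize % kMaxChunkSize) != 0)) == 1);

    srcSize = kMaxChunkSize + 1;        /* one byte spills into a 2nd chunk */
    assert(((srcSize / kMaxChunkSize) + ((srcSize % kMaxChunkSize) != 0)) == 2);

    srcSize = 3 * kMaxChunkSize - 1;    /* rounds up to 3 chunks */
    assert(((srcSize / kMaxChunkSize) + ((srcSize % kMaxChunkSize) != 0)) == 3);
    return 0;
}
#endif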
void ZSTD_ldm_skipSequences(rawSeqStore_t* rawSeqStore, size_t srcSize,
                            U32 const minMatch)
{
    while (srcSize > 0 && rawSeqStore->pos < rawSeqStore->size) {
        rawSeq* seq = rawSeqStore->seq + rawSeqStore->pos;
        if (srcSize <= seq->litLength) { /* ... */ }
        /* ... */
        if (srcSize < seq->matchLength) {
            /* ... */
            if (rawSeqStore->pos + 1 < rawSeqStore->size) { /* ... */ }
            /* ... */
        }
        /* ... */
    }
}
static rawSeq maybeSplitSequence(rawSeqStore_t* rawSeqStore,
                                 U32 const remaining, U32 const minMatch)
{
    rawSeq sequence = rawSeqStore->seq[rawSeqStore->pos];
    /* ... */
    if (remaining <= sequence.litLength) { /* ... */ }
    /* ... */
    if (sequence.matchLength < minMatch) { /* ... */ }
    /* ... */
}
size_t ZSTD_ldm_blockCompress(rawSeqStore_t* rawSeqStore,
    ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
    void const* src, size_t srcSize)
{
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    unsigned const minMatch = cParams->minMatch;
    ZSTD_blockCompressor const blockCompressor =
        ZSTD_selectBlockCompressor(cParams->strategy, ZSTD_matchState_dictMode(ms));
    /* Input bounds */
    /* ... */
    BYTE const* const iend = istart + srcSize;
    /* ... */
    DEBUGLOG(5, "ZSTD_ldm_blockCompress: srcSize=%zu", srcSize);

    while (rawSeqStore->pos < rawSeqStore->size && ip < iend) {
        /* maybeSplitSequence updates rawSeqStore->pos */
        rawSeq const sequence = maybeSplitSequence(rawSeqStore,
                                                   (U32)(iend - ip), minMatch);
        /* ... */
        DEBUGLOG(5, "pos %u : calling block compressor on segment of size %u",
                 (unsigned)(ip - istart), sequence.litLength);
        /* ... */
        /* Run the block compressor on the literals preceding the match */
        {   size_t const newLitLength =
                blockCompressor(ms, seqStore, rep, ip, sequence.litLength);
            /* ... */
        }
        /* ... */
    }
    /* ... */
    /* Compress the last literals */
    return blockCompressor(ms, seqStore, rep, ip, iend - ip);
}
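/*
 * Illustration (not part of zstd_ldm.c): a rawSeq describes litLength literal
 * bytes followed by a matchLength-byte copy from offset bytes back, which is
 * what ZSTD_ldm_blockCompress feeds to the block compressor piece by piece.
 * A generic, self-contained sketch of applying one such sequence to a flat
 * buffer (not zstd's actual code; the struct and function names are made up),
 * compiled separately:
 */
#if 0
#include <stdint.h>
#include <stdio.h>
#include <string.h>

typedef struct { uint32_t offset; uint32_t litLength; uint32_t matchLength; } raw_seq_demo;

/* Append litLength literals from *lits, then copy matchLength bytes starting
 * `offset` bytes before the current end of dst. Returns the new length. */
static size_t apply_seq(unsigned char* dst, size_t dstLen,
                        const unsigned char* lits, raw_seq_demo seq)
{
    memcpy(dst + dstLen, lits, seq.litLength);
    dstLen += seq.litLength;
    for (uint32_t i = 0; i < seq.matchLength; i++) {   /* byte-wise: copy may overlap */
        dst[dstLen + i] = dst[dstLen + i - seq.offset];
    }
    return dstLen + seq.matchLength;
}

int main(void)
{
    unsigned char out[64] = "abcdef";                  /* already-produced output */
    raw_seq_demo const seq = { .offset = 6, .litLength = 3, .matchLength = 4 };
    size_t const n = apply_seq(out, 6, (const unsigned char*)"XYZ", seq);
    printf("%.*s\n", (int)n, out);                     /* prints abcdefXYZdefX */
    return 0;
}
#endif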
/* ------------------------------------------------------------------------
 * Declarations referenced above: functions defined in zstd_ldm.c and the
 * internal helpers it uses.
 * ------------------------------------------------------------------------ */

/* zstd_ldm.c */
void   ZSTD_ldm_adjustParameters(ldmParams_t* params, ZSTD_compressionParameters const* cParams);
size_t ZSTD_ldm_getTableSize(ldmParams_t params);
size_t ZSTD_ldm_getMaxNbSeq(ldmParams_t params, size_t maxChunkSize);
void   ZSTD_ldm_fillHashTable(ldmState_t* state, const BYTE* ip, const BYTE* iend, ldmParams_t const* params);
size_t ZSTD_ldm_generateSequences(ldmState_t* ldmState, rawSeqStore_t* sequences, ldmParams_t const* params, void const* src, size_t srcSize);
void   ZSTD_ldm_skipSequences(rawSeqStore_t* rawSeqStore, size_t srcSize, U32 const minMatch);
size_t ZSTD_ldm_blockCompress(rawSeqStore_t* rawSeqStore, ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize);

static U32    ZSTD_ldm_getSmallHash(U64 value, U32 numBits);
static U32    ZSTD_ldm_getChecksum(U64 hash, U32 numBitsToDiscard);
static U32    ZSTD_ldm_getTag(U64 hash, U32 hbits, U32 numTagBits);
static ldmEntry_t* ZSTD_ldm_getBucket(ldmState_t* ldmState, size_t hash, ldmParams_t const ldmParams);
static void   ZSTD_ldm_insertEntry(ldmState_t* ldmState, size_t const hash, const ldmEntry_t entry, ldmParams_t const ldmParams);
static void   ZSTD_ldm_makeEntryAndInsertByTag(ldmState_t* ldmState, U64 const rollingHash, U32 const hBits, U32 const offset, ldmParams_t const ldmParams);
static size_t ZSTD_ldm_countBackwardsMatch(const BYTE* pIn, const BYTE* pAnchor, const BYTE* pMatch, const BYTE* pBase);
static U64    ZSTD_ldm_fillLdmHashTable(ldmState_t* state, U64 lastHash, const BYTE* lastHashed, const BYTE* iend, const BYTE* base, U32 hBits, ldmParams_t const ldmParams);
static size_t ZSTD_ldm_generateSequences_internal(ldmState_t* ldmState, rawSeqStore_t* rawSeqStore, ldmParams_t const* params, void const* src, size_t srcSize);
static void   ZSTD_ldm_reduceTable(ldmEntry_t* const table, U32 const size, U32 const reducerValue);
static size_t ZSTD_ldm_fillFastTables(ZSTD_matchState_t* ms, void const* end);
static void   ZSTD_ldm_limitTableUpdate(ZSTD_matchState_t* ms, const BYTE* anchor);
static rawSeq maybeSplitSequence(rawSeqStore_t* rawSeqStore, U32 const remaining, U32 const minMatch);

/* internal helpers (zstd_compress_internal.h and friends) */
typedef size_t (*ZSTD_blockCompressor)(ZSTD_matchState_t* bs, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize);
ZSTD_blockCompressor ZSTD_selectBlockCompressor(ZSTD_strategy strat, ZSTD_dictMode_e dictMode);
MEM_STATIC ZSTD_dictMode_e ZSTD_matchState_dictMode(const ZSTD_matchState_t* ms);
MEM_STATIC U64 ZSTD_rollingHash_compute(void const* buf, size_t size);
MEM_STATIC U64 ZSTD_rollingHash_rotate(U64 hash, BYTE toRemove, BYTE toAdd, U64 primePower);
MEM_STATIC U32 ZSTD_window_hasExtDict(ZSTD_window_t const window);
MEM_STATIC U32 ZSTD_window_needOverflowCorrection(ZSTD_window_t const window, void const* srcEnd);
MEM_STATIC U32 ZSTD_window_correctOverflow(ZSTD_window_t* window, U32 cycleLog, U32 maxDist, void const* src);
MEM_STATIC void ZSTD_window_enforceMaxDist(ZSTD_window_t* window, const void* blockEnd, U32 maxDist, U32* loadedDictEndPtr, const ZSTD_matchState_t** dictMatchStatePtr);
MEM_STATIC size_t ZSTD_count(const BYTE* pIn, const BYTE* pMatch, const BYTE* const pInLimit);
MEM_STATIC size_t ZSTD_count_2segments(const BYTE* ip, const BYTE* match, const BYTE* iEnd, const BYTE* mEnd, const BYTE* iStart);
MEM_STATIC size_t ZSTD_cwksp_alloc_size(size_t size);
HINT_INLINE UNUSED_ATTR void ZSTD_storeSeq(seqStore_t* seqStorePtr, size_t litLength, const BYTE* literals, const BYTE* litLimit, U32 offCode, size_t mlBase);
void ZSTD_fillHashTable(ZSTD_matchState_t* ms, const void* const end, ZSTD_dictTableLoadMethod_e dtlm);
void ZSTD_fillDoubleHashTable(ZSTD_matchState_t* ms, void const* end, ZSTD_dictTableLoadMethod_e dtlm);
/* also referenced: ZSTD_CHUNKSIZE_MAX, ZSTD_STATIC_ASSERT(c), LDM_BUCKET_SIZE_LOG, LDM_MIN_MATCH_LENGTH */