ReactOS  0.4.15-dev-1018-g0695ecb
zstd_double_fast.c
/*
 * Copyright (c) 2016-present, Yann Collet, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
 */

#include "zstd_compress_internal.h"
#include "zstd_double_fast.h"

void ZSTD_fillDoubleHashTable(ZSTD_matchState_t* ms,
                              void const* end, ZSTD_dictTableLoadMethod_e dtlm)
{
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    U32* const hashLarge = ms->hashTable;
    U32  const hBitsL = cParams->hashLog;
    U32  const mls = cParams->searchLength;
    U32* const hashSmall = ms->chainTable;
    U32  const hBitsS = cParams->chainLog;
    const BYTE* const base = ms->window.base;
    const BYTE* ip = base + ms->nextToUpdate;
    const BYTE* const iend = ((const BYTE*)end) - HASH_READ_SIZE;
    const U32 fastHashFillStep = 3;

    /* Always insert every fastHashFillStep position into the hash tables.
     * Insert the other positions into the large hash table if their entry
     * is empty.
     */
    for (; ip + fastHashFillStep - 1 <= iend; ip += fastHashFillStep) {
        U32 const current = (U32)(ip - base);
        U32 i;
        for (i = 0; i < fastHashFillStep; ++i) {
            size_t const smHash = ZSTD_hashPtr(ip + i, hBitsS, mls);
            size_t const lgHash = ZSTD_hashPtr(ip + i, hBitsL, 8);
            if (i == 0)
                hashSmall[smHash] = current + i;
            if (i == 0 || hashLarge[lgHash] == 0)
                hashLarge[lgHash] = current + i;
            /* Only load extra positions for ZSTD_dtlm_full */
            if (dtlm == ZSTD_dtlm_fast)
                break;
        }
    }
}

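/* "Double fast" maintains two hash tables over the same window: hashLong indexes
 * 8-byte sequences, hashSmall indexes mls-byte sequences.  The search below always
 * prefers a long match (it covers more bytes per stored sequence) and only accepts
 * a short-table hit after also probing the long table one position ahead.  The
 * mls and dictMode template parameters let the compiler specialize this body for
 * each searchLength and for the no-dictionary / dictMatchState cases. */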
FORCE_INLINE_TEMPLATE
size_t ZSTD_compressBlock_doubleFast_generic(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        void const* src, size_t srcSize,
        U32 const mls /* template */, ZSTD_dictMode_e const dictMode)
{
    ZSTD_compressionParameters const* cParams = &ms->cParams;
    U32* const hashLong = ms->hashTable;
    const U32 hBitsL = cParams->hashLog;
    U32* const hashSmall = ms->chainTable;
    const U32 hBitsS = cParams->chainLog;
    const BYTE* const base = ms->window.base;
    const BYTE* const istart = (const BYTE*)src;
    const BYTE* ip = istart;
    const BYTE* anchor = istart;
    const U32 prefixLowestIndex = ms->window.dictLimit;
    const BYTE* const prefixLowest = base + prefixLowestIndex;
    const BYTE* const iend = istart + srcSize;
    const BYTE* const ilimit = iend - HASH_READ_SIZE;
    U32 offset_1=rep[0], offset_2=rep[1];
    U32 offsetSaved = 0;

    const ZSTD_matchState_t* const dms = ms->dictMatchState;
    const ZSTD_compressionParameters* const dictCParams =
                                      dictMode == ZSTD_dictMatchState ?
                                      &dms->cParams : NULL;
    const U32* const dictHashLong  = dictMode == ZSTD_dictMatchState ?
                                     dms->hashTable : NULL;
    const U32* const dictHashSmall = dictMode == ZSTD_dictMatchState ?
                                     dms->chainTable : NULL;
    const U32 dictStartIndex       = dictMode == ZSTD_dictMatchState ?
                                     dms->window.dictLimit : 0;
    const BYTE* const dictBase     = dictMode == ZSTD_dictMatchState ?
                                     dms->window.base : NULL;
    const BYTE* const dictStart    = dictMode == ZSTD_dictMatchState ?
                                     dictBase + dictStartIndex : NULL;
    const BYTE* const dictEnd      = dictMode == ZSTD_dictMatchState ?
                                     dms->window.nextSrc : NULL;
    const U32 dictIndexDelta       = dictMode == ZSTD_dictMatchState ?
                                     prefixLowestIndex - (U32)(dictEnd - dictBase) :
                                     0;
    const U32 dictHBitsL           = dictMode == ZSTD_dictMatchState ?
                                     dictCParams->hashLog : hBitsL;
    const U32 dictHBitsS           = dictMode == ZSTD_dictMatchState ?
                                     dictCParams->chainLog : hBitsS;
    const U32 dictAndPrefixLength  = (U32)(ip - prefixLowest + dictEnd - dictStart);

    assert(dictMode == ZSTD_noDict || dictMode == ZSTD_dictMatchState);

    /* init */
    ip += (dictAndPrefixLength == 0);
    if (dictMode == ZSTD_noDict) {
        U32 const maxRep = (U32)(ip - prefixLowest);
        if (offset_2 > maxRep) offsetSaved = offset_2, offset_2 = 0;
        if (offset_1 > maxRep) offsetSaved = offset_1, offset_1 = 0;
    }
    if (dictMode == ZSTD_dictMatchState) {
        /* dictMatchState repCode checks don't currently handle repCode == 0
         * disabling. */
        assert(offset_1 <= dictAndPrefixLength);
        assert(offset_2 <= dictAndPrefixLength);
    }

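    /* Each iteration tries, in order: the repcode at ip+1, a long (8-byte) match
     * at ip, and, when only a short (mls-byte) match exists at ip, a long match
     * at ip+1 before falling back to that short match. */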
    /* Main Search Loop */
    while (ip < ilimit) {   /* < instead of <=, because repcode check at (ip+1) */
        size_t mLength;
        U32 offset;
        size_t const h2 = ZSTD_hashPtr(ip, hBitsL, 8);
        size_t const h = ZSTD_hashPtr(ip, hBitsS, mls);
        size_t const dictHL = ZSTD_hashPtr(ip, dictHBitsL, 8);
        size_t const dictHS = ZSTD_hashPtr(ip, dictHBitsS, mls);
        U32 const current = (U32)(ip-base);
        U32 const matchIndexL = hashLong[h2];
        U32 matchIndexS = hashSmall[h];
        const BYTE* matchLong = base + matchIndexL;
        const BYTE* match = base + matchIndexS;
        const U32 repIndex = current + 1 - offset_1;
        const BYTE* repMatch = (dictMode == ZSTD_dictMatchState
                            && repIndex < prefixLowestIndex) ?
                               dictBase + (repIndex - dictIndexDelta) :
                               base + repIndex;
        hashLong[h2] = hashSmall[h] = current;   /* update hash tables */

        /* check dictMatchState repcode */
        if (dictMode == ZSTD_dictMatchState
            && ((U32)((prefixLowestIndex-1) - repIndex) >= 3 /* intentional underflow */)
            && (MEM_read32(repMatch) == MEM_read32(ip+1)) ) {
            const BYTE* repMatchEnd = repIndex < prefixLowestIndex ? dictEnd : iend;
            mLength = ZSTD_count_2segments(ip+1+4, repMatch+4, iend, repMatchEnd, prefixLowest) + 4;
            ip++;
            ZSTD_storeSeq(seqStore, ip-anchor, anchor, 0, mLength-MINMATCH);
            goto _match_stored;
        }

        /* check noDict repcode */
        if ( dictMode == ZSTD_noDict
          && ((offset_1 > 0) & (MEM_read32(ip+1-offset_1) == MEM_read32(ip+1)))) {
            mLength = ZSTD_count(ip+1+4, ip+1+4-offset_1, iend) + 4;
            ip++;
            ZSTD_storeSeq(seqStore, ip-anchor, anchor, 0, mLength-MINMATCH);
            goto _match_stored;
        }

        if (matchIndexL > prefixLowestIndex) {
            /* check prefix long match */
            if (MEM_read64(matchLong) == MEM_read64(ip)) {
                mLength = ZSTD_count(ip+8, matchLong+8, iend) + 8;
                offset = (U32)(ip-matchLong);
                while (((ip>anchor) & (matchLong>prefixLowest)) && (ip[-1] == matchLong[-1])) { ip--; matchLong--; mLength++; } /* catch up */
                goto _match_found;
            }
        } else if (dictMode == ZSTD_dictMatchState) {
            /* check dictMatchState long match */
            U32 const dictMatchIndexL = dictHashLong[dictHL];
            const BYTE* dictMatchL = dictBase + dictMatchIndexL;
            assert(dictMatchL < dictEnd);

            if (dictMatchL > dictStart && MEM_read64(dictMatchL) == MEM_read64(ip)) {
                mLength = ZSTD_count_2segments(ip+8, dictMatchL+8, iend, dictEnd, prefixLowest) + 8;
                offset = (U32)(current - dictMatchIndexL - dictIndexDelta);
                while (((ip>anchor) & (dictMatchL>dictStart)) && (ip[-1] == dictMatchL[-1])) { ip--; dictMatchL--; mLength++; } /* catch up */
                goto _match_found;
            }
        }

        if (matchIndexS > prefixLowestIndex) {
            /* check prefix short match */
            if (MEM_read32(match) == MEM_read32(ip)) {
                goto _search_next_long;
            }
        } else if (dictMode == ZSTD_dictMatchState) {
            /* check dictMatchState short match */
            U32 const dictMatchIndexS = dictHashSmall[dictHS];
            match = dictBase + dictMatchIndexS;
            matchIndexS = dictMatchIndexS + dictIndexDelta;

            if (match > dictStart && MEM_read32(match) == MEM_read32(ip)) {
                goto _search_next_long;
            }
        }

        ip += ((ip-anchor) >> kSearchStrength) + 1;
        continue;

_search_next_long:

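        /* A short match was found at ip; before emitting it, probe the long table
         * at ip+1, since a long match starting one byte later usually gives a
         * better trade-off than the short match at ip. */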
        {
            size_t const hl3 = ZSTD_hashPtr(ip+1, hBitsL, 8);
            size_t const dictHLNext = ZSTD_hashPtr(ip+1, dictHBitsL, 8);
            U32 const matchIndexL3 = hashLong[hl3];
            const BYTE* matchL3 = base + matchIndexL3;
            hashLong[hl3] = current + 1;

            /* check prefix long +1 match */
            if (matchIndexL3 > prefixLowestIndex) {
                if (MEM_read64(matchL3) == MEM_read64(ip+1)) {
                    mLength = ZSTD_count(ip+9, matchL3+8, iend) + 8;
                    ip++;
                    offset = (U32)(ip-matchL3);
                    while (((ip>anchor) & (matchL3>prefixLowest)) && (ip[-1] == matchL3[-1])) { ip--; matchL3--; mLength++; } /* catch up */
                    goto _match_found;
                }
            } else if (dictMode == ZSTD_dictMatchState) {
                /* check dict long +1 match */
                U32 const dictMatchIndexL3 = dictHashLong[dictHLNext];
                const BYTE* dictMatchL3 = dictBase + dictMatchIndexL3;
                assert(dictMatchL3 < dictEnd);
                if (dictMatchL3 > dictStart && MEM_read64(dictMatchL3) == MEM_read64(ip+1)) {
                    mLength = ZSTD_count_2segments(ip+1+8, dictMatchL3+8, iend, dictEnd, prefixLowest) + 8;
                    ip++;
                    offset = (U32)(current + 1 - dictMatchIndexL3 - dictIndexDelta);
                    while (((ip>anchor) & (dictMatchL3>dictStart)) && (ip[-1] == dictMatchL3[-1])) { ip--; dictMatchL3--; mLength++; } /* catch up */
                    goto _match_found;
                }
            }
        }

        /* if no long +1 match, explore the short match we found */
        if (dictMode == ZSTD_dictMatchState && matchIndexS < prefixLowestIndex) {
            mLength = ZSTD_count_2segments(ip+4, match+4, iend, dictEnd, prefixLowest) + 4;
            offset = (U32)(current - matchIndexS);
            while (((ip>anchor) & (match>dictStart)) && (ip[-1] == match[-1])) { ip--; match--; mLength++; } /* catch up */
        } else {
            mLength = ZSTD_count(ip+4, match+4, iend) + 4;
            offset = (U32)(ip - match);
            while (((ip>anchor) & (match>prefixLowest)) && (ip[-1] == match[-1])) { ip--; match--; mLength++; } /* catch up */
        }

        /* fall-through */

_match_found:
        offset_2 = offset_1;
        offset_1 = offset;

        ZSTD_storeSeq(seqStore, ip-anchor, anchor, offset + ZSTD_REP_MOVE, mLength-MINMATCH);

_match_stored:
        /* match found */
        ip += mLength;
        anchor = ip;

        if (ip <= ilimit) {
            /* Fill Table */
            hashLong[ZSTD_hashPtr(base+current+2, hBitsL, 8)] =
                hashSmall[ZSTD_hashPtr(base+current+2, hBitsS, mls)] = current+2;  /* here because current+2 could be > iend-8 */
            hashLong[ZSTD_hashPtr(ip-2, hBitsL, 8)] =
                hashSmall[ZSTD_hashPtr(ip-2, hBitsS, mls)] = (U32)(ip-2-base);

            /* check immediate repcode */
            if (dictMode == ZSTD_dictMatchState) {
                while (ip <= ilimit) {
                    U32 const current2 = (U32)(ip-base);
                    U32 const repIndex2 = current2 - offset_2;
                    const BYTE* repMatch2 = dictMode == ZSTD_dictMatchState
                        && repIndex2 < prefixLowestIndex ?
                            dictBase - dictIndexDelta + repIndex2 :
                            base + repIndex2;
                    if ( ((U32)((prefixLowestIndex-1) - (U32)repIndex2) >= 3 /* intentional overflow */)
                       && (MEM_read32(repMatch2) == MEM_read32(ip)) ) {
                        const BYTE* const repEnd2 = repIndex2 < prefixLowestIndex ? dictEnd : iend;
                        size_t const repLength2 = ZSTD_count_2segments(ip+4, repMatch2+4, iend, repEnd2, prefixLowest) + 4;
                        U32 tmpOffset = offset_2; offset_2 = offset_1; offset_1 = tmpOffset;   /* swap offset_2 <=> offset_1 */
                        ZSTD_storeSeq(seqStore, 0, anchor, 0, repLength2-MINMATCH);
                        hashSmall[ZSTD_hashPtr(ip, hBitsS, mls)] = current2;
                        hashLong[ZSTD_hashPtr(ip, hBitsL, 8)] = current2;
                        ip += repLength2;
                        anchor = ip;
                        continue;
                    }
                    break;
                }
            }

            if (dictMode == ZSTD_noDict) {
                while ( (ip <= ilimit)
                     && ( (offset_2>0)
                        & (MEM_read32(ip) == MEM_read32(ip - offset_2)) )) {
                    /* store sequence */
                    size_t const rLength = ZSTD_count(ip+4, ip+4-offset_2, iend) + 4;
                    U32 const tmpOff = offset_2; offset_2 = offset_1; offset_1 = tmpOff;  /* swap offset_2 <=> offset_1 */
                    hashSmall[ZSTD_hashPtr(ip, hBitsS, mls)] = (U32)(ip-base);
                    hashLong[ZSTD_hashPtr(ip, hBitsL, 8)] = (U32)(ip-base);
                    ZSTD_storeSeq(seqStore, 0, anchor, 0, rLength-MINMATCH);
                    ip += rLength;
                    anchor = ip;
                    continue;   /* faster when present ... (?) */
        }   }   }   }

    /* save reps for next block */
    rep[0] = offset_1 ? offset_1 : offsetSaved;
    rep[1] = offset_2 ? offset_2 : offsetSaved;

    /* Return the last literals size */
    return iend - anchor;
}

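/* The wrappers below instantiate the FORCE_INLINE_TEMPLATE body once per supported
 * searchLength (4 to 7, with 3 mapped onto 4), so each specialization runs with a
 * compile-time constant mls and dictMode. */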
size_t ZSTD_compressBlock_doubleFast(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        void const* src, size_t srcSize)
{
    const U32 mls = ms->cParams.searchLength;
    switch(mls)
    {
    default: /* includes case 3 */
    case 4 :
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 4, ZSTD_noDict);
    case 5 :
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 5, ZSTD_noDict);
    case 6 :
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 6, ZSTD_noDict);
    case 7 :
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 7, ZSTD_noDict);
    }
}

size_t ZSTD_compressBlock_doubleFast_dictMatchState(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        void const* src, size_t srcSize)
{
    const U32 mls = ms->cParams.searchLength;
    switch(mls)
    {
    default: /* includes case 3 */
    case 4 :
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 4, ZSTD_dictMatchState);
    case 5 :
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 5, ZSTD_dictMatchState);
    case 6 :
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 6, ZSTD_dictMatchState);
    case 7 :
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 7, ZSTD_dictMatchState);
    }
}

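/* extDict variant: match candidates may live in the external segment
 * [dictStart, dictEnd) below prefixStartIndex, so each candidate index is rebased
 * onto dictBase or base as appropriate, and ZSTD_count_2segments() measures
 * matches that cross the boundary between the two segments. */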
static size_t ZSTD_compressBlock_doubleFast_extDict_generic(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        void const* src, size_t srcSize,
        U32 const mls /* template */)
{
    ZSTD_compressionParameters const* cParams = &ms->cParams;
    U32* const hashLong = ms->hashTable;
    U32  const hBitsL = cParams->hashLog;
    U32* const hashSmall = ms->chainTable;
    U32  const hBitsS = cParams->chainLog;
    const BYTE* const istart = (const BYTE*)src;
    const BYTE* ip = istart;
    const BYTE* anchor = istart;
    const BYTE* const iend = istart + srcSize;
    const BYTE* const ilimit = iend - 8;
    const U32   prefixStartIndex = ms->window.dictLimit;
    const BYTE* const base = ms->window.base;
    const BYTE* const prefixStart = base + prefixStartIndex;
    const U32   dictStartIndex = ms->window.lowLimit;
    const BYTE* const dictBase = ms->window.dictBase;
    const BYTE* const dictStart = dictBase + dictStartIndex;
    const BYTE* const dictEnd = dictBase + prefixStartIndex;
    U32 offset_1=rep[0], offset_2=rep[1];

    DEBUGLOG(5, "ZSTD_compressBlock_doubleFast_extDict_generic (srcSize=%zu)", srcSize);

    /* Search Loop */
    while (ip < ilimit) {  /* < instead of <=, because (ip+1) */
        const size_t hSmall = ZSTD_hashPtr(ip, hBitsS, mls);
        const U32 matchIndex = hashSmall[hSmall];
        const BYTE* const matchBase = matchIndex < prefixStartIndex ? dictBase : base;
        const BYTE* match = matchBase + matchIndex;

        const size_t hLong = ZSTD_hashPtr(ip, hBitsL, 8);
        const U32 matchLongIndex = hashLong[hLong];
        const BYTE* const matchLongBase = matchLongIndex < prefixStartIndex ? dictBase : base;
        const BYTE* matchLong = matchLongBase + matchLongIndex;

        const U32 current = (U32)(ip-base);
        const U32 repIndex = current + 1 - offset_1;   /* offset_1 expected <= current +1 */
        const BYTE* const repBase = repIndex < prefixStartIndex ? dictBase : base;
        const BYTE* const repMatch = repBase + repIndex;
        size_t mLength;
        hashSmall[hSmall] = hashLong[hLong] = current;   /* update hash table */

        if ((((U32)((prefixStartIndex-1) - repIndex) >= 3) /* intentional underflow : ensure repIndex doesn't overlap dict + prefix */
            & (repIndex > dictStartIndex))
          && (MEM_read32(repMatch) == MEM_read32(ip+1)) ) {
            const BYTE* repMatchEnd = repIndex < prefixStartIndex ? dictEnd : iend;
            mLength = ZSTD_count_2segments(ip+1+4, repMatch+4, iend, repMatchEnd, prefixStart) + 4;
            ip++;
            ZSTD_storeSeq(seqStore, ip-anchor, anchor, 0, mLength-MINMATCH);
        } else {
            if ((matchLongIndex > dictStartIndex) && (MEM_read64(matchLong) == MEM_read64(ip))) {
                const BYTE* const matchEnd = matchLongIndex < prefixStartIndex ? dictEnd : iend;
                const BYTE* const lowMatchPtr = matchLongIndex < prefixStartIndex ? dictStart : prefixStart;
                U32 offset;
                mLength = ZSTD_count_2segments(ip+8, matchLong+8, iend, matchEnd, prefixStart) + 8;
                offset = current - matchLongIndex;
                while (((ip>anchor) & (matchLong>lowMatchPtr)) && (ip[-1] == matchLong[-1])) { ip--; matchLong--; mLength++; }   /* catch up */
                offset_2 = offset_1;
                offset_1 = offset;
                ZSTD_storeSeq(seqStore, ip-anchor, anchor, offset + ZSTD_REP_MOVE, mLength-MINMATCH);

            } else if ((matchIndex > dictStartIndex) && (MEM_read32(match) == MEM_read32(ip))) {
                size_t const h3 = ZSTD_hashPtr(ip+1, hBitsL, 8);
                U32 const matchIndex3 = hashLong[h3];
                const BYTE* const match3Base = matchIndex3 < prefixStartIndex ? dictBase : base;
                const BYTE* match3 = match3Base + matchIndex3;
                U32 offset;
                hashLong[h3] = current + 1;
                if ( (matchIndex3 > dictStartIndex) && (MEM_read64(match3) == MEM_read64(ip+1)) ) {
                    const BYTE* const matchEnd = matchIndex3 < prefixStartIndex ? dictEnd : iend;
                    const BYTE* const lowMatchPtr = matchIndex3 < prefixStartIndex ? dictStart : prefixStart;
                    mLength = ZSTD_count_2segments(ip+9, match3+8, iend, matchEnd, prefixStart) + 8;
                    ip++;
                    offset = current+1 - matchIndex3;
                    while (((ip>anchor) & (match3>lowMatchPtr)) && (ip[-1] == match3[-1])) { ip--; match3--; mLength++; } /* catch up */
                } else {
                    const BYTE* const matchEnd = matchIndex < prefixStartIndex ? dictEnd : iend;
                    const BYTE* const lowMatchPtr = matchIndex < prefixStartIndex ? dictStart : prefixStart;
                    mLength = ZSTD_count_2segments(ip+4, match+4, iend, matchEnd, prefixStart) + 4;
                    offset = current - matchIndex;
                    while (((ip>anchor) & (match>lowMatchPtr)) && (ip[-1] == match[-1])) { ip--; match--; mLength++; }   /* catch up */
                }
                offset_2 = offset_1;
                offset_1 = offset;
                ZSTD_storeSeq(seqStore, ip-anchor, anchor, offset + ZSTD_REP_MOVE, mLength-MINMATCH);

            } else {
                ip += ((ip-anchor) >> kSearchStrength) + 1;
                continue;
        }   }

        /* found a match : store it */
        ip += mLength;
        anchor = ip;

        if (ip <= ilimit) {
            /* Fill Table */
            hashSmall[ZSTD_hashPtr(base+current+2, hBitsS, mls)] = current+2;
            hashLong[ZSTD_hashPtr(base+current+2, hBitsL, 8)] = current+2;
            hashSmall[ZSTD_hashPtr(ip-2, hBitsS, mls)] = (U32)(ip-2-base);
            hashLong[ZSTD_hashPtr(ip-2, hBitsL, 8)] = (U32)(ip-2-base);
            /* check immediate repcode */
            while (ip <= ilimit) {
                U32 const current2 = (U32)(ip-base);
                U32 const repIndex2 = current2 - offset_2;
                const BYTE* repMatch2 = repIndex2 < prefixStartIndex ? dictBase + repIndex2 : base + repIndex2;
                if ( (((U32)((prefixStartIndex-1) - repIndex2) >= 3)   /* intentional overflow : ensure repIndex2 doesn't overlap dict + prefix */
                    & (repIndex2 > dictStartIndex))
                  && (MEM_read32(repMatch2) == MEM_read32(ip)) ) {
                    const BYTE* const repEnd2 = repIndex2 < prefixStartIndex ? dictEnd : iend;
                    size_t const repLength2 = ZSTD_count_2segments(ip+4, repMatch2+4, iend, repEnd2, prefixStart) + 4;
                    U32 const tmpOffset = offset_2; offset_2 = offset_1; offset_1 = tmpOffset;   /* swap offset_2 <=> offset_1 */
                    ZSTD_storeSeq(seqStore, 0, anchor, 0, repLength2-MINMATCH);
                    hashSmall[ZSTD_hashPtr(ip, hBitsS, mls)] = current2;
                    hashLong[ZSTD_hashPtr(ip, hBitsL, 8)] = current2;
                    ip += repLength2;
                    anchor = ip;
                    continue;
                }
                break;
    }   }   }

    /* save reps for next block */
    rep[0] = offset_1;
    rep[1] = offset_2;

    /* Return the last literals size */
    return iend - anchor;
}

size_t ZSTD_compressBlock_doubleFast_extDict(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        void const* src, size_t srcSize)
{
    U32 const mls = ms->cParams.searchLength;
    switch(mls)
    {
    default: /* includes case 3 */
    case 4 :
        return ZSTD_compressBlock_doubleFast_extDict_generic(ms, seqStore, rep, src, srcSize, 4);
    case 5 :
        return ZSTD_compressBlock_doubleFast_extDict_generic(ms, seqStore, rep, src, srcSize, 5);
    case 6 :
        return ZSTD_compressBlock_doubleFast_extDict_generic(ms, seqStore, rep, src, srcSize, 6);
    case 7 :
        return ZSTD_compressBlock_doubleFast_extDict_generic(ms, seqStore, rep, src, srcSize, 7);
    }
}
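
/* Note: these entry points are internal to the compressor; applications do not call
 * them directly.  At this zstd version they are expected to be picked by the block
 * compressor dispatch in zstd_compress.c when the ZSTD_dfast strategy is active,
 * choosing the noDict, dictMatchState or extDict flavour to match the window state.
 * A minimal, hypothetical call (assuming ms, seqStore and rep were set up by that
 * regular path):
 *
 *     size_t const lastLiterals =
 *         ZSTD_compressBlock_doubleFast(ms, seqStore, rep, src, srcSize);
 *     // sequences are now appended to seqStore; the final lastLiterals bytes
 *     // of the block remain to be emitted as raw literals.
 */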