ReactOS 0.4.16-dev-122-g325d74c
zstd_opt.c File Reference
#include "zstd_compress_internal.h"
#include "hist.h"
#include "zstd_opt.h"
Include dependency graph for zstd_opt.c:

Go to the source code of this file.

Macros

#define ZSTD_LITFREQ_ADD   2 /* scaling factor for litFreq, so that frequencies adapt faster to new stats */
 
#define ZSTD_FREQ_DIV   4 /* log factor when using previous stats to init next stats */
 
#define ZSTD_MAX_PRICE   (1<<30)
 
#define ZSTD_PREDEF_THRESHOLD   1024 /* if srcSize < ZSTD_PREDEF_THRESHOLD, symbols' cost is assumed static, directly determined by pre-defined distributions */
 
#define BITCOST_ACCURACY   8
 
#define BITCOST_MULTIPLIER   (1 << BITCOST_ACCURACY)
 
#define WEIGHT(stat, opt)   (opt ? ZSTD_fracWeight(stat) : ZSTD_bitWeight(stat))
 

Functions

MEM_STATIC U32 ZSTD_bitWeight (U32 stat)
 
MEM_STATIC U32 ZSTD_fracWeight (U32 rawStat)
 
static int ZSTD_compressedLiterals (optState_t const *const optPtr)
 
static void ZSTD_setBasePrices (optState_t *optPtr, int optLevel)
 
static U32 ZSTD_downscaleStat (unsigned *table, U32 lastEltIndex, int malus)
 
static void ZSTD_rescaleFreqs (optState_t *const optPtr, const BYTE *const src, size_t const srcSize, int const optLevel)
 
static U32 ZSTD_rawLiteralsCost (const BYTE *const literals, U32 const litLength, const optState_t *const optPtr, int optLevel)
 
static U32 ZSTD_litLengthPrice (U32 const litLength, const optState_t *const optPtr, int optLevel)
 
FORCE_INLINE_TEMPLATE U32 ZSTD_getMatchPrice (U32 const offset, U32 const matchLength, const optState_t *const optPtr, int const optLevel)
 
static void ZSTD_updateStats (optState_t *const optPtr, U32 litLength, const BYTE *literals, U32 offsetCode, U32 matchLength)
 
MEM_STATIC U32 ZSTD_readMINMATCH (const void *memPtr, U32 length)
 
static U32 ZSTD_insertAndFindFirstIndexHash3 (ZSTD_matchState_t *ms, U32 *nextToUpdate3, const BYTE *const ip)
 
static U32 ZSTD_insertBt1 (ZSTD_matchState_t *ms, const BYTE *const ip, const BYTE *const iend, U32 const mls, const int extDict)
 
FORCE_INLINE_TEMPLATE void ZSTD_updateTree_internal (ZSTD_matchState_t *ms, const BYTE *const ip, const BYTE *const iend, const U32 mls, const ZSTD_dictMode_e dictMode)
 
void ZSTD_updateTree (ZSTD_matchState_t *ms, const BYTE *ip, const BYTE *iend)
 
FORCE_INLINE_TEMPLATE U32 ZSTD_insertBtAndGetAllMatches (ZSTD_match_t *matches, ZSTD_matchState_t *ms, U32 *nextToUpdate3, const BYTE *const ip, const BYTE *const iLimit, const ZSTD_dictMode_e dictMode, const U32 rep[ZSTD_REP_NUM], U32 const ll0, const U32 lengthToBeat, U32 const mls)
 
FORCE_INLINE_TEMPLATE U32 ZSTD_BtGetAllMatches (ZSTD_match_t *matches, ZSTD_matchState_t *ms, U32 *nextToUpdate3, const BYTE *ip, const BYTE *const iHighLimit, const ZSTD_dictMode_e dictMode, const U32 rep[ZSTD_REP_NUM], U32 const ll0, U32 const lengthToBeat)
 
static U32 ZSTD_totalLen (ZSTD_optimal_t sol)
 
FORCE_INLINE_TEMPLATE size_t ZSTD_compressBlock_opt_generic (ZSTD_matchState_t *ms, seqStore_t *seqStore, U32 rep[ZSTD_REP_NUM], const void *src, size_t srcSize, const int optLevel, const ZSTD_dictMode_e dictMode)
 
size_t ZSTD_compressBlock_btopt (ZSTD_matchState_t *ms, seqStore_t *seqStore, U32 rep[ZSTD_REP_NUM], const void *src, size_t srcSize)
 
static U32 ZSTD_upscaleStat (unsigned *table, U32 lastEltIndex, int bonus)
 
MEM_STATIC void ZSTD_upscaleStats (optState_t *optPtr)
 
static void ZSTD_initStats_ultra (ZSTD_matchState_t *ms, seqStore_t *seqStore, U32 rep[ZSTD_REP_NUM], const void *src, size_t srcSize)
 
size_t ZSTD_compressBlock_btultra (ZSTD_matchState_t *ms, seqStore_t *seqStore, U32 rep[ZSTD_REP_NUM], const void *src, size_t srcSize)
 
size_t ZSTD_compressBlock_btultra2 (ZSTD_matchState_t *ms, seqStore_t *seqStore, U32 rep[ZSTD_REP_NUM], const void *src, size_t srcSize)
 
size_t ZSTD_compressBlock_btopt_dictMatchState (ZSTD_matchState_t *ms, seqStore_t *seqStore, U32 rep[ZSTD_REP_NUM], const void *src, size_t srcSize)
 
size_t ZSTD_compressBlock_btultra_dictMatchState (ZSTD_matchState_t *ms, seqStore_t *seqStore, U32 rep[ZSTD_REP_NUM], const void *src, size_t srcSize)
 
size_t ZSTD_compressBlock_btopt_extDict (ZSTD_matchState_t *ms, seqStore_t *seqStore, U32 rep[ZSTD_REP_NUM], const void *src, size_t srcSize)
 
size_t ZSTD_compressBlock_btultra_extDict (ZSTD_matchState_t *ms, seqStore_t *seqStore, U32 rep[ZSTD_REP_NUM], const void *src, size_t srcSize)
 

Macro Definition Documentation

◆ BITCOST_ACCURACY

#define BITCOST_ACCURACY   8

Definition at line 36 of file zstd_opt.c.

◆ BITCOST_MULTIPLIER

#define BITCOST_MULTIPLIER   (1 << BITCOST_ACCURACY)

Definition at line 37 of file zstd_opt.c.

◆ WEIGHT

#define WEIGHT (   stat,
  opt 
)    (opt ? ZSTD_fracWeight(stat) : ZSTD_bitWeight(stat))

Definition at line 38 of file zstd_opt.c.

◆ ZSTD_FREQ_DIV

#define ZSTD_FREQ_DIV   4 /* log factor when using previous stats to init next stats */

Definition at line 17 of file zstd_opt.c.

◆ ZSTD_LITFREQ_ADD

#define ZSTD_LITFREQ_ADD   2 /* scaling factor for litFreq, so that frequencies adapt faster to new stats */

Definition at line 16 of file zstd_opt.c.

◆ ZSTD_MAX_PRICE

#define ZSTD_MAX_PRICE   (1<<30)

Definition at line 18 of file zstd_opt.c.

◆ ZSTD_PREDEF_THRESHOLD

#define ZSTD_PREDEF_THRESHOLD   1024 /* if srcSize < ZSTD_PREDEF_THRESHOLD, symbols' cost is assumed static, directly determined by pre-defined distributions */

Definition at line 20 of file zstd_opt.c.

Function Documentation

◆ ZSTD_bitWeight()

MEM_STATIC U32 ZSTD_bitWeight ( U32  stat)

Definition at line 41 of file zstd_opt.c.

42{
44}
Definition: stat.h:55
MEM_STATIC U32 ZSTD_highbit32(U32 val)
#define BITCOST_MULTIPLIER
Definition: zstd_opt.c:37

◆ ZSTD_BtGetAllMatches()

FORCE_INLINE_TEMPLATE U32 ZSTD_BtGetAllMatches ( ZSTD_match_t matches,
ZSTD_matchState_t ms,
U32 nextToUpdate3,
const BYTE ip,
const BYTE *const  iHighLimit,
const ZSTD_dictMode_e  dictMode,
const U32  rep[ZSTD_REP_NUM],
U32 const  ll0,
U32 const  lengthToBeat 
)

Definition at line 742 of file zstd_opt.c.

750{
751 const ZSTD_compressionParameters* const cParams = &ms->cParams;
752 U32 const matchLengthSearch = cParams->minMatch;
753 DEBUGLOG(8, "ZSTD_BtGetAllMatches");
754 if (ip < ms->window.base + ms->nextToUpdate) return 0; /* skipped area */
755 ZSTD_updateTree_internal(ms, ip, iHighLimit, matchLengthSearch, dictMode);
756 switch(matchLengthSearch)
757 {
758 case 3 : return ZSTD_insertBtAndGetAllMatches(matches, ms, nextToUpdate3, ip, iHighLimit, dictMode, rep, ll0, lengthToBeat, 3);
759 default :
760 case 4 : return ZSTD_insertBtAndGetAllMatches(matches, ms, nextToUpdate3, ip, iHighLimit, dictMode, rep, ll0, lengthToBeat, 4);
761 case 5 : return ZSTD_insertBtAndGetAllMatches(matches, ms, nextToUpdate3, ip, iHighLimit, dictMode, rep, ll0, lengthToBeat, 5);
762 case 7 :
763 case 6 : return ZSTD_insertBtAndGetAllMatches(matches, ms, nextToUpdate3, ip, iHighLimit, dictMode, rep, ll0, lengthToBeat, 6);
764 }
765}
#define DEBUGLOG(l,...)
Definition: debug.h:106
#define matches(FN)
Definition: match.h:70
static IHTMLWindow2 * window
Definition: events.c:77
ZSTD_compressionParameters cParams
Definition: dhcpd.h:62
unsigned int U32
Definition: xxhash.c:195
FORCE_INLINE_TEMPLATE U32 ZSTD_insertBtAndGetAllMatches(ZSTD_match_t *matches, ZSTD_matchState_t *ms, U32 *nextToUpdate3, const BYTE *const ip, const BYTE *const iLimit, const ZSTD_dictMode_e dictMode, const U32 rep[ZSTD_REP_NUM], U32 const ll0, const U32 lengthToBeat, U32 const mls)
Definition: zstd_opt.c:509
FORCE_INLINE_TEMPLATE void ZSTD_updateTree_internal(ZSTD_matchState_t *ms, const BYTE *const ip, const BYTE *const iend, const U32 mls, const ZSTD_dictMode_e dictMode)
Definition: zstd_opt.c:483

Referenced by ZSTD_compressBlock_opt_generic().

◆ ZSTD_compressBlock_btopt()

size_t ZSTD_compressBlock_btopt ( ZSTD_matchState_t ms,
seqStore_t seqStore,
U32  rep[ZSTD_REP_NUM],
const void src,
size_t  srcSize 
)

Definition at line 1069 of file zstd_opt.c.

1072{
1073 DEBUGLOG(5, "ZSTD_compressBlock_btopt");
1074 return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 0 /*optLevel*/, ZSTD_noDict);
1075}
GLenum src
Definition: glext.h:6340
FORCE_INLINE_TEMPLATE size_t ZSTD_compressBlock_opt_generic(ZSTD_matchState_t *ms, seqStore_t *seqStore, U32 rep[ZSTD_REP_NUM], const void *src, size_t srcSize, const int optLevel, const ZSTD_dictMode_e dictMode)
Definition: zstd_opt.c:796

Referenced by ZSTD_selectBlockCompressor().

◆ ZSTD_compressBlock_btopt_dictMatchState()

size_t ZSTD_compressBlock_btopt_dictMatchState ( ZSTD_matchState_t ms,
seqStore_t seqStore,
U32  rep[ZSTD_REP_NUM],
const void src,
size_t  srcSize 
)

Definition at line 1170 of file zstd_opt.c.

1173{
1174 return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 0 /*optLevel*/, ZSTD_dictMatchState);
1175}
@ ZSTD_dictMatchState

Referenced by ZSTD_selectBlockCompressor().

◆ ZSTD_compressBlock_btopt_extDict()

size_t ZSTD_compressBlock_btopt_extDict ( ZSTD_matchState_t ms,
seqStore_t seqStore,
U32  rep[ZSTD_REP_NUM],
const void src,
size_t  srcSize 
)

Definition at line 1184 of file zstd_opt.c.

1187{
1188 return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 0 /*optLevel*/, ZSTD_extDict);
1189}

Referenced by ZSTD_selectBlockCompressor().

◆ ZSTD_compressBlock_btultra()

size_t ZSTD_compressBlock_btultra ( ZSTD_matchState_t ms,
seqStore_t seqStore,
U32  rep[ZSTD_REP_NUM],
const void src,
size_t  srcSize 
)

Definition at line 1134 of file zstd_opt.c.

1137{
1138 DEBUGLOG(5, "ZSTD_compressBlock_btultra (srcSize=%zu)", srcSize);
1139 return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 2 /*optLevel*/, ZSTD_noDict);
1140}

Referenced by ZSTD_selectBlockCompressor().

◆ ZSTD_compressBlock_btultra2()

size_t ZSTD_compressBlock_btultra2 ( ZSTD_matchState_t ms,
seqStore_t seqStore,
U32  rep[ZSTD_REP_NUM],
const void src,
size_t  srcSize 
)

Definition at line 1142 of file zstd_opt.c.

1145{
1146 U32 const current = (U32)((const BYTE*)src - ms->window.base);
1147 DEBUGLOG(5, "ZSTD_compressBlock_btultra2 (srcSize=%zu)", srcSize);
1148
1149 /* 2-pass strategy:
1150 * this strategy makes a first pass over first block to collect statistics
1151 * and seed next round's statistics with it.
1152 * After 1st pass, function forgets everything, and starts a new block.
1153 * Consequently, this can only work if no data has been previously loaded in tables,
1154 * aka, no dictionary, no prefix, no ldm preprocessing.
1155 * The compression ratio gain is generally small (~0.5% on first block),
1156 * the cost is 2x cpu time on first block. */
1157 assert(srcSize <= ZSTD_BLOCKSIZE_MAX);
1158 if ( (ms->opt.litLengthSum==0) /* first block */
1159 && (seqStore->sequences == seqStore->sequencesStart) /* no ldm */
1160 && (ms->window.dictLimit == ms->window.lowLimit) /* no dictionary */
1161 && (current == ms->window.dictLimit) /* start of frame, nothing already loaded nor skipped */
1162 && (srcSize > ZSTD_PREDEF_THRESHOLD)
1163 ) {
1164 ZSTD_initStats_ultra(ms, seqStore, rep, src, srcSize);
1165 }
1166
1167 return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 2 /*optLevel*/, ZSTD_noDict);
1168}
#define assert(x)
Definition: debug.h:53
struct task_struct * current
Definition: linux.c:32
seqDef * sequencesStart
seqDef * sequences
unsigned char BYTE
Definition: xxhash.c:193
#define ZSTD_BLOCKSIZE_MAX
Definition: zstd.h:104
static void ZSTD_initStats_ultra(ZSTD_matchState_t *ms, seqStore_t *seqStore, U32 rep[ZSTD_REP_NUM], const void *src, size_t srcSize)
Definition: zstd_opt.c:1107
#define ZSTD_PREDEF_THRESHOLD
Definition: zstd_opt.c:20

Referenced by ZSTD_selectBlockCompressor().

◆ ZSTD_compressBlock_btultra_dictMatchState()

size_t ZSTD_compressBlock_btultra_dictMatchState ( ZSTD_matchState_t ms,
seqStore_t seqStore,
U32  rep[ZSTD_REP_NUM],
const void src,
size_t  srcSize 
)

Definition at line 1177 of file zstd_opt.c.

1180{
1181 return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 2 /*optLevel*/, ZSTD_dictMatchState);
1182}

Referenced by ZSTD_selectBlockCompressor().

◆ ZSTD_compressBlock_btultra_extDict()

size_t ZSTD_compressBlock_btultra_extDict ( ZSTD_matchState_t ms,
seqStore_t seqStore,
U32  rep[ZSTD_REP_NUM],
const void src,
size_t  srcSize 
)

Definition at line 1191 of file zstd_opt.c.

1194{
1195 return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 2 /*optLevel*/, ZSTD_extDict);
1196}

Referenced by ZSTD_selectBlockCompressor().

◆ ZSTD_compressBlock_opt_generic()

FORCE_INLINE_TEMPLATE size_t ZSTD_compressBlock_opt_generic ( ZSTD_matchState_t ms,
seqStore_t seqStore,
U32  rep[ZSTD_REP_NUM],
const void src,
size_t  srcSize,
const int  optLevel,
const ZSTD_dictMode_e  dictMode 
)

Definition at line 796 of file zstd_opt.c.

802{
803 optState_t* const optStatePtr = &ms->opt;
804 const BYTE* const istart = (const BYTE*)src;
805 const BYTE* ip = istart;
806 const BYTE* anchor = istart;
807 const BYTE* const iend = istart + srcSize;
808 const BYTE* const ilimit = iend - 8;
809 const BYTE* const base = ms->window.base;
810 const BYTE* const prefixStart = base + ms->window.dictLimit;
811 const ZSTD_compressionParameters* const cParams = &ms->cParams;
812
813 U32 const sufficient_len = MIN(cParams->targetLength, ZSTD_OPT_NUM -1);
814 U32 const minMatch = (cParams->minMatch == 3) ? 3 : 4;
815 U32 nextToUpdate3 = ms->nextToUpdate;
816
817 ZSTD_optimal_t* const opt = optStatePtr->priceTable;
818 ZSTD_match_t* const matches = optStatePtr->matchTable;
819 ZSTD_optimal_t lastSequence;
820
821 /* init */
822 DEBUGLOG(5, "ZSTD_compressBlock_opt_generic: current=%u, prefix=%u, nextToUpdate=%u",
823 (U32)(ip - base), ms->window.dictLimit, ms->nextToUpdate);
824 assert(optLevel <= 2);
825 ZSTD_rescaleFreqs(optStatePtr, (const BYTE*)src, srcSize, optLevel);
826 ip += (ip==prefixStart);
827
828 /* Match Loop */
829 while (ip < ilimit) {
830 U32 cur, last_pos = 0;
831
832 /* find first match */
833 { U32 const litlen = (U32)(ip - anchor);
834 U32 const ll0 = !litlen;
835 U32 const nbMatches = ZSTD_BtGetAllMatches(matches, ms, &nextToUpdate3, ip, iend, dictMode, rep, ll0, minMatch);
836 if (!nbMatches) { ip++; continue; }
837
838 /* initialize opt[0] */
839 { U32 i ; for (i=0; i<ZSTD_REP_NUM; i++) opt[0].rep[i] = rep[i]; }
840 opt[0].mlen = 0; /* means is_a_literal */
841 opt[0].litlen = litlen;
842 /* We don't need to include the actual price of the literals because
843 * it is static for the duration of the forward pass, and is included
844 * in every price. We include the literal length to avoid negative
845 * prices when we subtract the previous literal length.
846 */
847 opt[0].price = ZSTD_litLengthPrice(litlen, optStatePtr, optLevel);
848
849 /* large match -> immediate encoding */
850 { U32 const maxML = matches[nbMatches-1].len;
851 U32 const maxOffset = matches[nbMatches-1].off;
852 DEBUGLOG(6, "found %u matches of maxLength=%u and maxOffCode=%u at cPos=%u => start new series",
853 nbMatches, maxML, maxOffset, (U32)(ip-prefixStart));
854
855 if (maxML > sufficient_len) {
856 lastSequence.litlen = litlen;
857 lastSequence.mlen = maxML;
858 lastSequence.off = maxOffset;
859 DEBUGLOG(6, "large match (%u>%u), immediate encoding",
860 maxML, sufficient_len);
861 cur = 0;
862 last_pos = ZSTD_totalLen(lastSequence);
863 goto _shortestPath;
864 } }
865
866 /* set prices for first matches starting position == 0 */
867 { U32 const literalsPrice = opt[0].price + ZSTD_litLengthPrice(0, optStatePtr, optLevel);
868 U32 pos;
869 U32 matchNb;
870 for (pos = 1; pos < minMatch; pos++) {
871 opt[pos].price = ZSTD_MAX_PRICE; /* mlen, litlen and price will be fixed during forward scanning */
872 }
873 for (matchNb = 0; matchNb < nbMatches; matchNb++) {
874 U32 const offset = matches[matchNb].off;
875 U32 const end = matches[matchNb].len;
876 for ( ; pos <= end ; pos++ ) {
877 U32 const matchPrice = ZSTD_getMatchPrice(offset, pos, optStatePtr, optLevel);
878 U32 const sequencePrice = literalsPrice + matchPrice;
879 DEBUGLOG(7, "rPos:%u => set initial price : %.2f",
880 pos, ZSTD_fCost(sequencePrice));
881 opt[pos].mlen = pos;
882 opt[pos].off = offset;
883 opt[pos].litlen = litlen;
884 opt[pos].price = sequencePrice;
885 } }
886 last_pos = pos-1;
887 }
888 }
889
890 /* check further positions */
891 for (cur = 1; cur <= last_pos; cur++) {
892 const BYTE* const inr = ip + cur;
894 DEBUGLOG(7, "cPos:%zi==rPos:%u", inr-istart, cur)
895
896 /* Fix current position with one literal if cheaper */
897 { U32 const litlen = (opt[cur-1].mlen == 0) ? opt[cur-1].litlen + 1 : 1;
898 int const price = opt[cur-1].price
899 + ZSTD_rawLiteralsCost(ip+cur-1, 1, optStatePtr, optLevel)
900 + ZSTD_litLengthPrice(litlen, optStatePtr, optLevel)
901 - ZSTD_litLengthPrice(litlen-1, optStatePtr, optLevel);
902 assert(price < 1000000000); /* overflow check */
903 if (price <= opt[cur].price) {
904 DEBUGLOG(7, "cPos:%zi==rPos:%u : better price (%.2f<=%.2f) using literal (ll==%u) (hist:%u,%u,%u)",
905 inr-istart, cur, ZSTD_fCost(price), ZSTD_fCost(opt[cur].price), litlen,
906 opt[cur-1].rep[0], opt[cur-1].rep[1], opt[cur-1].rep[2]);
907 opt[cur].mlen = 0;
908 opt[cur].off = 0;
909 opt[cur].litlen = litlen;
910 opt[cur].price = price;
911 } else {
912 DEBUGLOG(7, "cPos:%zi==rPos:%u : literal would cost more (%.2f>%.2f) (hist:%u,%u,%u)",
913 inr-istart, cur, ZSTD_fCost(price), ZSTD_fCost(opt[cur].price),
914 opt[cur].rep[0], opt[cur].rep[1], opt[cur].rep[2]);
915 }
916 }
917
918 /* Set the repcodes of the current position. We must do it here
919 * because we rely on the repcodes of the 2nd to last sequence being
920 * correct to set the next chunks repcodes during the backward
921 * traversal.
922 */
923 ZSTD_STATIC_ASSERT(sizeof(opt[cur].rep) == sizeof(repcodes_t));
924 assert(cur >= opt[cur].mlen);
925 if (opt[cur].mlen != 0) {
926 U32 const prev = cur - opt[cur].mlen;
927 repcodes_t newReps = ZSTD_updateRep(opt[prev].rep, opt[cur].off, opt[cur].litlen==0);
928 memcpy(opt[cur].rep, &newReps, sizeof(repcodes_t));
929 } else {
930 memcpy(opt[cur].rep, opt[cur - 1].rep, sizeof(repcodes_t));
931 }
932
933 /* last match must start at a minimum distance of 8 from oend */
934 if (inr > ilimit) continue;
935
936 if (cur == last_pos) break;
937
938 if ( (optLevel==0) /*static_test*/
939 && (opt[cur+1].price <= opt[cur].price + (BITCOST_MULTIPLIER/2)) ) {
940 DEBUGLOG(7, "move to next rPos:%u : price is <=", cur+1);
941 continue; /* skip unpromising positions; about ~+6% speed, -0.01 ratio */
942 }
943
944 { U32 const ll0 = (opt[cur].mlen != 0);
945 U32 const litlen = (opt[cur].mlen == 0) ? opt[cur].litlen : 0;
946 U32 const previousPrice = opt[cur].price;
947 U32 const basePrice = previousPrice + ZSTD_litLengthPrice(0, optStatePtr, optLevel);
948 U32 const nbMatches = ZSTD_BtGetAllMatches(matches, ms, &nextToUpdate3, inr, iend, dictMode, opt[cur].rep, ll0, minMatch);
949 U32 matchNb;
950 if (!nbMatches) {
951 DEBUGLOG(7, "rPos:%u : no match found", cur);
952 continue;
953 }
954
955 { U32 const maxML = matches[nbMatches-1].len;
956 DEBUGLOG(7, "cPos:%zi==rPos:%u, found %u matches, of maxLength=%u",
957 inr-istart, cur, nbMatches, maxML);
958
959 if ( (maxML > sufficient_len)
960 || (cur + maxML >= ZSTD_OPT_NUM) ) {
961 lastSequence.mlen = maxML;
962 lastSequence.off = matches[nbMatches-1].off;
963 lastSequence.litlen = litlen;
964 cur -= (opt[cur].mlen==0) ? opt[cur].litlen : 0; /* last sequence is actually only literals, fix cur to last match - note : may underflow, in which case, it's first sequence, and it's okay */
965 last_pos = cur + ZSTD_totalLen(lastSequence);
966 if (cur > ZSTD_OPT_NUM) cur = 0; /* underflow => first match */
967 goto _shortestPath;
968 } }
969
970 /* set prices using matches found at position == cur */
971 for (matchNb = 0; matchNb < nbMatches; matchNb++) {
972 U32 const offset = matches[matchNb].off;
973 U32 const lastML = matches[matchNb].len;
974 U32 const startML = (matchNb>0) ? matches[matchNb-1].len+1 : minMatch;
975 U32 mlen;
976
977 DEBUGLOG(7, "testing match %u => offCode=%4u, mlen=%2u, llen=%2u",
978 matchNb, matches[matchNb].off, lastML, litlen);
979
980 for (mlen = lastML; mlen >= startML; mlen--) { /* scan downward */
981 U32 const pos = cur + mlen;
982 int const price = basePrice + ZSTD_getMatchPrice(offset, mlen, optStatePtr, optLevel);
983
984 if ((pos > last_pos) || (price < opt[pos].price)) {
985 DEBUGLOG(7, "rPos:%u (ml=%2u) => new better price (%.2f<%.2f)",
986 pos, mlen, ZSTD_fCost(price), ZSTD_fCost(opt[pos].price));
987 while (last_pos < pos) { opt[last_pos+1].price = ZSTD_MAX_PRICE; last_pos++; } /* fill empty positions */
988 opt[pos].mlen = mlen;
989 opt[pos].off = offset;
990 opt[pos].litlen = litlen;
991 opt[pos].price = price;
992 } else {
993 DEBUGLOG(7, "rPos:%u (ml=%2u) => new price is worse (%.2f>=%.2f)",
994 pos, mlen, ZSTD_fCost(price), ZSTD_fCost(opt[pos].price));
995 if (optLevel==0) break; /* early update abort; gets ~+10% speed for about -0.01 ratio loss */
996 }
997 } } }
998 } /* for (cur = 1; cur <= last_pos; cur++) */
999
1000 lastSequence = opt[last_pos];
1001 cur = last_pos > ZSTD_totalLen(lastSequence) ? last_pos - ZSTD_totalLen(lastSequence) : 0; /* single sequence, and it starts before `ip` */
1002 assert(cur < ZSTD_OPT_NUM); /* control overflow*/
1003
1004_shortestPath: /* cur, last_pos, best_mlen, best_off have to be set */
1005 assert(opt[0].mlen == 0);
1006
1007 /* Set the next chunk's repcodes based on the repcodes of the beginning
1008 * of the last match, and the last sequence. This avoids us having to
1009 * update them while traversing the sequences.
1010 */
1011 if (lastSequence.mlen != 0) {
1012 repcodes_t reps = ZSTD_updateRep(opt[cur].rep, lastSequence.off, lastSequence.litlen==0);
1013 memcpy(rep, &reps, sizeof(reps));
1014 } else {
1015 memcpy(rep, opt[cur].rep, sizeof(repcodes_t));
1016 }
1017
1018 { U32 const storeEnd = cur + 1;
1019 U32 storeStart = storeEnd;
1020 U32 seqPos = cur;
1021
1022 DEBUGLOG(6, "start reverse traversal (last_pos:%u, cur:%u)",
1023 last_pos, cur); (void)last_pos;
1024 assert(storeEnd < ZSTD_OPT_NUM);
1025 DEBUGLOG(6, "last sequence copied into pos=%u (llen=%u,mlen=%u,ofc=%u)",
1026 storeEnd, lastSequence.litlen, lastSequence.mlen, lastSequence.off);
1027 opt[storeEnd] = lastSequence;
1028 while (seqPos > 0) {
1029 U32 const backDist = ZSTD_totalLen(opt[seqPos]);
1030 storeStart--;
1031 DEBUGLOG(6, "sequence from rPos=%u copied into pos=%u (llen=%u,mlen=%u,ofc=%u)",
1032 seqPos, storeStart, opt[seqPos].litlen, opt[seqPos].mlen, opt[seqPos].off);
1033 opt[storeStart] = opt[seqPos];
1034 seqPos = (seqPos > backDist) ? seqPos - backDist : 0;
1035 }
1036
1037 /* save sequences */
1038 DEBUGLOG(6, "sending selected sequences into seqStore")
1039 { U32 storePos;
1040 for (storePos=storeStart; storePos <= storeEnd; storePos++) {
1041 U32 const llen = opt[storePos].litlen;
1042 U32 const mlen = opt[storePos].mlen;
1043 U32 const offCode = opt[storePos].off;
1044 U32 const advance = llen + mlen;
1045 DEBUGLOG(6, "considering seq starting at %zi, llen=%u, mlen=%u",
1046 anchor - istart, (unsigned)llen, (unsigned)mlen);
1047
1048 if (mlen==0) { /* only literals => must be last "sequence", actually starting a new stream of sequences */
1049 assert(storePos == storeEnd); /* must be last sequence */
1050 ip = anchor + llen; /* last "sequence" is a bunch of literals => don't progress anchor */
1051 continue; /* will finish */
1052 }
1053
1054 assert(anchor + llen <= iend);
1055 ZSTD_updateStats(optStatePtr, llen, anchor, offCode, mlen);
1056 ZSTD_storeSeq(seqStore, llen, anchor, iend, offCode, mlen-MINMATCH);
1057 anchor += advance;
1058 ip = anchor;
1059 } }
1060 ZSTD_setBasePrices(optStatePtr, optLevel);
1061 }
1062 } /* while (ip < ilimit) */
1063
1064 /* Return the last literals size */
1065 return (size_t)(iend - anchor);
1066}
_STLP_MOVE_TO_STD_NAMESPACE void _STLP_CALL advance(_InputIterator &__i, _Distance __n)
#define MIN(x, y)
Definition: rdesktop.h:171
FxCollectionEntry * cur
GLuint GLuint end
Definition: gl.h:1545
GLenum GLsizei len
Definition: glext.h:6722
GLintptr offset
Definition: glext.h:5920
GLsizei GLenum const GLvoid GLsizei GLenum GLbyte GLbyte GLbyte GLdouble GLdouble GLdouble GLfloat GLfloat GLfloat GLint GLint GLint GLshort GLshort GLshort GLubyte GLubyte GLubyte GLuint GLuint GLuint GLushort GLushort GLushort GLbyte GLbyte GLbyte GLbyte GLdouble GLdouble GLdouble GLdouble GLfloat GLfloat GLfloat GLfloat GLint GLint GLint GLint GLshort GLshort GLshort GLshort GLubyte GLubyte GLubyte GLubyte GLuint GLuint GLuint GLuint GLushort GLushort GLushort GLushort GLboolean const GLdouble const GLfloat const GLint const GLshort const GLbyte const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLdouble const GLfloat const GLfloat const GLint const GLint const GLshort const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort GLenum GLenum GLenum GLfloat GLenum GLint GLenum GLenum GLenum GLfloat GLenum GLenum GLint GLenum GLfloat GLenum GLint GLint GLushort GLenum GLenum GLfloat GLenum GLenum GLint GLfloat const GLubyte GLenum GLenum GLenum const GLfloat GLenum GLenum const GLint GLenum GLint GLint GLsizei GLsizei GLint GLenum GLenum const GLvoid GLenum GLenum const GLfloat GLenum GLenum const GLint GLenum GLenum const GLdouble GLenum GLenum const GLfloat GLenum GLenum const GLint GLsizei GLuint GLfloat GLuint GLbitfield GLfloat GLint GLuint GLboolean GLenum GLfloat GLenum GLbitfield GLenum GLfloat GLfloat GLint GLint const GLfloat GLenum GLfloat GLfloat GLint GLint GLfloat GLfloat GLint GLint const GLfloat GLint GLfloat GLfloat GLint 
GLfloat GLfloat GLint GLfloat GLfloat const GLdouble const GLfloat const GLdouble const GLfloat GLint i
Definition: glfuncs.h:248
#define memcpy(s1, s2, n)
Definition: mkisofs.h:878
ZSTD_match_t * matchTable
ZSTD_optimal_t * priceTable
MEM_STATIC repcodes_t ZSTD_updateRep(U32 const rep[3], U32 const offset, U32 const ll0)
HINT_INLINE UNUSED_ATTR void ZSTD_storeSeq(seqStore_t *seqStorePtr, size_t litLength, const BYTE *literals, const BYTE *litLimit, U32 offCode, size_t mlBase)
#define ZSTD_OPT_NUM
#define MINMATCH
#define ZSTD_STATIC_ASSERT(c)
Definition: zstd_internal.h:45
#define ZSTD_REP_NUM
FORCE_INLINE_TEMPLATE U32 ZSTD_getMatchPrice(U32 const offset, U32 const matchLength, const optState_t *const optPtr, int const optLevel)
Definition: zstd_opt.c:257
static void ZSTD_rescaleFreqs(optState_t *const optPtr, const BYTE *const src, size_t const srcSize, int const optLevel)
Definition: zstd_opt.c:104
static void ZSTD_setBasePrices(optState_t *optPtr, int optLevel)
Definition: zstd_opt.c:72
static U32 ZSTD_litLengthPrice(U32 const litLength, const optState_t *const optPtr, int optLevel)
Definition: zstd_opt.c:240
static void ZSTD_updateStats(optState_t *const optPtr, U32 litLength, const BYTE *literals, U32 offsetCode, U32 matchLength)
Definition: zstd_opt.c:288
static U32 ZSTD_totalLen(ZSTD_optimal_t sol)
Definition: zstd_opt.c:773
static U32 ZSTD_rawLiteralsCost(const BYTE *const literals, U32 const litLength, const optState_t *const optPtr, int optLevel)
Definition: zstd_opt.c:215
#define ZSTD_MAX_PRICE
Definition: zstd_opt.c:18
FORCE_INLINE_TEMPLATE U32 ZSTD_BtGetAllMatches(ZSTD_match_t *matches, ZSTD_matchState_t *ms, U32 *nextToUpdate3, const BYTE *ip, const BYTE *const iHighLimit, const ZSTD_dictMode_e dictMode, const U32 rep[ZSTD_REP_NUM], U32 const ll0, U32 const lengthToBeat)
Definition: zstd_opt.c:742

Referenced by ZSTD_compressBlock_btopt(), ZSTD_compressBlock_btopt_dictMatchState(), ZSTD_compressBlock_btopt_extDict(), ZSTD_compressBlock_btultra(), ZSTD_compressBlock_btultra2(), ZSTD_compressBlock_btultra_dictMatchState(), ZSTD_compressBlock_btultra_extDict(), and ZSTD_initStats_ultra().

◆ ZSTD_compressedLiterals()

static int ZSTD_compressedLiterals ( optState_t const *const  optPtr)
static

Definition at line 67 of file zstd_opt.c.

68{
69 return optPtr->literalCompressionMode != ZSTD_lcm_uncompressed;
70}

Referenced by ZSTD_rawLiteralsCost(), ZSTD_rescaleFreqs(), ZSTD_setBasePrices(), ZSTD_updateStats(), and ZSTD_upscaleStats().

◆ ZSTD_downscaleStat()

static U32 ZSTD_downscaleStat ( unsigned table,
U32  lastEltIndex,
int  malus 
)
static

Definition at line 85 of file zstd_opt.c.

86{
87 U32 s, sum=0;
88 DEBUGLOG(5, "ZSTD_downscaleStat (nbElts=%u)", (unsigned)lastEltIndex+1);
89 assert(ZSTD_FREQ_DIV+malus > 0 && ZSTD_FREQ_DIV+malus < 31);
90 for (s=0; s<lastEltIndex+1; s++) {
91 table[s] = 1 + (table[s] >> (ZSTD_FREQ_DIV+malus));
92 sum += table[s];
93 }
94 return sum;
95}
GLdouble s
Definition: gl.h:2039
static int sum(int x_, int y_)
Definition: ptr2_test.cpp:35
#define ZSTD_FREQ_DIV
Definition: zstd_opt.c:17

Referenced by ZSTD_rescaleFreqs().

◆ ZSTD_fracWeight()

MEM_STATIC U32 ZSTD_fracWeight ( U32  rawStat)

Definition at line 46 of file zstd_opt.c.

47{
48 U32 const stat = rawStat + 1;
49 U32 const hb = ZSTD_highbit32(stat);
50 U32 const BWeight = hb * BITCOST_MULTIPLIER;
51 U32 const FWeight = (stat << BITCOST_ACCURACY) >> hb;
52 U32 const weight = BWeight + FWeight;
53 assert(hb + BITCOST_ACCURACY < 31);
54 return weight;
55}
weight
Definition: sortkey.c:157
#define BITCOST_ACCURACY
Definition: zstd_opt.c:36

◆ ZSTD_getMatchPrice()

FORCE_INLINE_TEMPLATE U32 ZSTD_getMatchPrice ( U32 const  offset,
U32 const  matchLength,
const optState_t *const  optPtr,
int const  optLevel 
)

Definition at line 257 of file zstd_opt.c.

261{
262 U32 price;
263 U32 const offCode = ZSTD_highbit32(offset+1);
264 U32 const mlBase = matchLength - MINMATCH;
265 assert(matchLength >= MINMATCH);
266
267 if (optPtr->priceType == zop_predef) /* fixed scheme, do not use statistics */
268 return WEIGHT(mlBase, optLevel) + ((16 + offCode) * BITCOST_MULTIPLIER);
269
270 /* dynamic statistics */
271 price = (offCode * BITCOST_MULTIPLIER) + (optPtr->offCodeSumBasePrice - WEIGHT(optPtr->offCodeFreq[offCode], optLevel));
272 if ((optLevel<2) /*static*/ && offCode >= 20)
273 price += (offCode-19)*2 * BITCOST_MULTIPLIER; /* handicap for long distance offsets, favor decompression speed */
274
275 /* match Length */
276 { U32 const mlCode = ZSTD_MLcode(mlBase);
277 price += (ML_bits[mlCode] * BITCOST_MULTIPLIER) + (optPtr->matchLengthSumBasePrice - WEIGHT(optPtr->matchLengthFreq[mlCode], optLevel));
278 }
279
280 price += BITCOST_MULTIPLIER / 5; /* heuristic : make matches a bit more costly to favor less sequences -> faster decompression speed */
281
282 DEBUGLOG(8, "ZSTD_getMatchPrice(ml:%u) = %u", matchLength, price);
283 return price;
284}
unsigned * offCodeFreq
ZSTD_OptPrice_e priceType
unsigned * matchLengthFreq
MEM_STATIC U32 ZSTD_MLcode(U32 mlBase)
static const U32 ML_bits[MaxML+1]
#define WEIGHT(stat, opt)
Definition: zstd_opt.c:38

Referenced by ZSTD_compressBlock_opt_generic().

◆ ZSTD_initStats_ultra()

static void ZSTD_initStats_ultra ( ZSTD_matchState_t ms,
seqStore_t seqStore,
U32  rep[ZSTD_REP_NUM],
const void src,
size_t  srcSize 
)
static

Definition at line 1107 of file zstd_opt.c.

1111{
1112 U32 tmpRep[ZSTD_REP_NUM]; /* updated rep codes will sink here */
1113 memcpy(tmpRep, rep, sizeof(tmpRep));
1114
1115 DEBUGLOG(4, "ZSTD_initStats_ultra (srcSize=%zu)", srcSize);
1116 assert(ms->opt.litLengthSum == 0); /* first block */
1117 assert(seqStore->sequences == seqStore->sequencesStart); /* no ldm */
1118 assert(ms->window.dictLimit == ms->window.lowLimit); /* no dictionary */
 1119 assert(ms->window.dictLimit - ms->nextToUpdate <= 1); /* no prefix (note: intentional overflow, defined as two's complement) */
1120
1121 ZSTD_compressBlock_opt_generic(ms, seqStore, tmpRep, src, srcSize, 2 /*optLevel*/, ZSTD_noDict); /* generate stats into ms->opt*/
1122
1123 /* invalidate first scan from history */
1124 ZSTD_resetSeqStore(seqStore);
1125 ms->window.base -= srcSize;
1126 ms->window.dictLimit += (U32)srcSize;
1127 ms->window.lowLimit = ms->window.dictLimit;
1128 ms->nextToUpdate = ms->window.dictLimit;
1129
 1130 /* reinforce weight of collected statistics */
1131 ZSTD_upscaleStats(&ms->opt);
1132}
void ZSTD_resetSeqStore(seqStore_t *ssPtr)
MEM_STATIC void ZSTD_upscaleStats(optState_t *optPtr)
Definition: zstd_opt.c:1092

Referenced by ZSTD_compressBlock_btultra2().

◆ ZSTD_insertAndFindFirstIndexHash3()

/* ZSTD_insertAndFindFirstIndexHash3() :
 * Insert every position from *nextToUpdate3 up to (but excluding) ip into
 * the 3-byte hash table, then return the candidate index currently stored
 * for ip's own 3-byte hash. *nextToUpdate3 is advanced to ip's index. */
static U32 ZSTD_insertAndFindFirstIndexHash3 (ZSTD_matchState_t* ms,
                                              U32* nextToUpdate3,
                                              const BYTE* const ip)
{
    U32* const table3 = ms->hashTable3;
    U32 const log3 = ms->hashLog3;
    const BYTE* const windowBase = ms->window.base;
    U32 const ipIndex = (U32)(ip - windowBase);
    size_t const ipHash = ZSTD_hash3Ptr(ip, log3);
    U32 pos;
    assert(log3 > 0);

    /* register every not-yet-inserted position */
    for (pos = *nextToUpdate3; pos < ipIndex; pos++)
        table3[ZSTD_hash3Ptr(windowBase + pos, log3)] = pos;

    *nextToUpdate3 = ipIndex;
    return table3[ipHash];
}
unsigned int idx
Definition: utils.c:41
GLenum target
Definition: glext.h:7315
MEM_STATIC size_t ZSTD_hash3Ptr(const void *ptr, U32 h)

Referenced by ZSTD_insertBtAndGetAllMatches().

◆ ZSTD_insertBt1()

/** ZSTD_insertBt1() : add one or multiple positions to tree.
 *  ip : assumed <= iend-8 .
 * @return : nb of positions added */
static U32 ZSTD_insertBt1(
                ZSTD_matchState_t* ms,
                const BYTE* const ip, const BYTE* const iend,
                U32 const mls, const int extDict)
{
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    U32*   const hashTable = ms->hashTable;
    U32    const hashLog = cParams->hashLog;
    size_t const h  = ZSTD_hashPtr(ip, hashLog, mls);
    U32*   const bt = ms->chainTable;          /* binary tree stored as pairs of links in a flat array */
    U32    const btLog  = cParams->chainLog - 1;
    U32    const btMask = (1 << btLog) - 1;
    U32 matchIndex = hashTable[h];
    size_t commonLengthSmaller=0, commonLengthLarger=0;   /* min guaranteed prefix length on each side */
    const BYTE* const base = ms->window.base;
    const BYTE* const dictBase = ms->window.dictBase;
    const U32 dictLimit = ms->window.dictLimit;
    const BYTE* const dictEnd = dictBase + dictLimit;
    const BYTE* const prefixStart = base + dictLimit;
    const BYTE* match;
    const U32 current = (U32)(ip-base);
    const U32 btLow = btMask >= current ? 0 : current - btMask;   /* oldest index still inside the tree buffer */
    U32* smallerPtr = bt + 2*(current&btMask);
    U32* largerPtr  = smallerPtr + 1;
    U32 dummy32;   /* to be nullified at the end */
    U32 const windowLow = ms->window.lowLimit;
    U32 matchEndIdx = current+8+1;
    size_t bestLength = 8;
    U32 nbCompares = 1U << cParams->searchLog;
#ifdef ZSTD_C_PREDICT
    /* optional prediction: reuse links computed for the previous position */
    U32 predictedSmall = *(bt + 2*((current-1)&btMask) + 0);
    U32 predictedLarge = *(bt + 2*((current-1)&btMask) + 1);
    predictedSmall += (predictedSmall>0);
    predictedLarge += (predictedLarge>0);
#endif /* ZSTD_C_PREDICT */

    DEBUGLOG(8, "ZSTD_insertBt1 (%u)", current);

    assert(ip <= iend-8);   /* required for h calculation */
    hashTable[h] = current;   /* Update Hash Table */

    assert(windowLow > 0);
    while (nbCompares-- && (matchIndex >= windowLow)) {
        U32* const nextPtr = bt + 2*(matchIndex & btMask);
        size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger);   /* guaranteed minimum nb of common bytes */
        assert(matchIndex < current);

#ifdef ZSTD_C_PREDICT   /* note : can create issues when hlog small <= 11 */
        const U32* predictPtr = bt + 2*((matchIndex-1) & btMask);   /* written this way, as bt is a roll buffer */
        if (matchIndex == predictedSmall) {
            /* no need to check length, result known */
            *smallerPtr = matchIndex;
            if (matchIndex <= btLow) { smallerPtr=&dummy32; break; }   /* beyond tree size, stop the search */
            smallerPtr = nextPtr+1;               /* new "smaller" => larger of match */
            matchIndex = nextPtr[1];              /* new matchIndex larger than previous (closer to current) */
            predictedSmall = predictPtr[1] + (predictPtr[1]>0);
            continue;
        }
        if (matchIndex == predictedLarge) {
            *largerPtr = matchIndex;
            if (matchIndex <= btLow) { largerPtr=&dummy32; break; }   /* beyond tree size, stop the search */
            largerPtr = nextPtr;
            matchIndex = nextPtr[0];
            predictedLarge = predictPtr[0] + (predictPtr[0]>0);
            continue;
        }
#endif

        /* extend the guaranteed common prefix; 2-segment variant when the
         * candidate lives in the extDict part of the window */
        if (!extDict || (matchIndex+matchLength >= dictLimit)) {
            assert(matchIndex+matchLength >= dictLimit);   /* might be wrong if actually extDict */
            match = base + matchIndex;
            matchLength += ZSTD_count(ip+matchLength, match+matchLength, iend);
        } else {
            match = dictBase + matchIndex;
            matchLength += ZSTD_count_2segments(ip+matchLength, match+matchLength, iend, dictEnd, prefixStart);
            if (matchIndex+matchLength >= dictLimit)
                match = base + matchIndex;   /* to prepare for next usage of match[matchLength] */
        }

        if (matchLength > bestLength) {
            bestLength = matchLength;
            if (matchLength > matchEndIdx - matchIndex)
                matchEndIdx = matchIndex + (U32)matchLength;
        }

        if (ip+matchLength == iend) {   /* equal : no way to know if inf or sup */
            break;   /* drop , to guarantee consistency ; miss a bit of compression, but other solutions can corrupt tree */
        }

        /* thread the candidate into the sorted tree on the correct side */
        if (match[matchLength] < ip[matchLength]) {  /* necessarily within buffer */
            /* match is smaller than current */
            *smallerPtr = matchIndex;             /* update smaller idx */
            commonLengthSmaller = matchLength;    /* all smaller will now have at least this guaranteed common length */
            if (matchIndex <= btLow) { smallerPtr=&dummy32; break; }   /* beyond tree size, stop searching */
            smallerPtr = nextPtr+1;               /* new "candidate" => larger than match, which was smaller than target */
            matchIndex = nextPtr[1];              /* new matchIndex, larger than previous and closer to current */
        } else {
            /* match is larger than current */
            *largerPtr = matchIndex;
            commonLengthLarger = matchLength;
            if (matchIndex <= btLow) { largerPtr=&dummy32; break; }   /* beyond tree size, stop searching */
            largerPtr = nextPtr;
            matchIndex = nextPtr[0];
    }   }

    *smallerPtr = *largerPtr = 0;   /* terminate both dangling branches */
    {   U32 positions = 0;
        if (bestLength > 384) positions = MIN(192, (U32)(bestLength - 384));   /* speed optimization */
        assert(matchEndIdx > current + 8);
        return MAX(positions, matchEndIdx - (current + 8));
    }
}
#define MAX(x, y)
Definition: rdesktop.h:175
void mls(int argc, const char *argv[])
Definition: cmds.c:1168
GLfloat GLfloat GLfloat GLfloat h
Definition: glext.h:7723
Definition: match.c:28
MEM_STATIC size_t ZSTD_count_2segments(const BYTE *ip, const BYTE *match, const BYTE *iEnd, const BYTE *mEnd, const BYTE *iStart)
MEM_STATIC size_t ZSTD_hashPtr(const void *p, U32 hBits, U32 mls)
MEM_STATIC size_t ZSTD_count(const BYTE *pIn, const BYTE *pMatch, const BYTE *const pInLimit)

Referenced by ZSTD_updateTree_internal().

◆ ZSTD_insertBtAndGetAllMatches()

/* ZSTD_insertBtAndGetAllMatches() :
 * Collect into matches[] every match candidate found at position ip whose
 * length reaches at least lengthToBeat : repcodes first, then an optional
 * 3-byte hash-chain probe, then the binary tree (and the dictMatchState
 * tree when dictMode == ZSTD_dictMatchState). Also inserts ip into the
 * binary tree as a side effect.
 * @return : number of matches stored (each strictly longer than the previous one). */
FORCE_INLINE_TEMPLATE
U32 ZSTD_insertBtAndGetAllMatches (
                    ZSTD_match_t* matches,  /* store result (found matches) in this table (presumed large enough) */
                    ZSTD_matchState_t* ms,
                    U32* nextToUpdate3,
                    const BYTE* const ip, const BYTE* const iLimit, const ZSTD_dictMode_e dictMode,
                    const U32 rep[ZSTD_REP_NUM],
                    U32 const ll0,       /* tells if associated literal length is 0 or not. Must be 0 or 1 */
                    const U32 lengthToBeat,
                    U32 const mls /* template */)
{
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    U32 const sufficient_len = MIN(cParams->targetLength, ZSTD_OPT_NUM -1);   /* early-exit threshold */
    const BYTE* const base = ms->window.base;
    U32 const current = (U32)(ip-base);
    U32 const hashLog = cParams->hashLog;
    U32 const minMatch = (mls==3) ? 3 : 4;
    U32* const hashTable = ms->hashTable;
    size_t const h  = ZSTD_hashPtr(ip, hashLog, mls);
    U32 matchIndex  = hashTable[h];
    U32* const bt   = ms->chainTable;
    U32 const btLog = cParams->chainLog - 1;
    U32 const btMask= (1U << btLog) - 1;
    size_t commonLengthSmaller=0, commonLengthLarger=0;
    const BYTE* const dictBase = ms->window.dictBase;
    U32 const dictLimit = ms->window.dictLimit;
    const BYTE* const dictEnd = dictBase + dictLimit;
    const BYTE* const prefixStart = base + dictLimit;
    U32 const btLow = (btMask >= current) ? 0 : current - btMask;
    U32 const windowLow = ZSTD_getLowestMatchIndex(ms, current, cParams->windowLog);
    U32 const matchLow = windowLow ? windowLow : 1;
    U32* smallerPtr = bt + 2*(current&btMask);
    U32* largerPtr  = bt + 2*(current&btMask) + 1;
    U32 matchEndIdx = current+8+1;   /* farthest referenced position of any match => detects repetitive patterns */
    U32 dummy32;   /* to be nullified at the end */
    U32 mnum = 0;
    U32 nbCompares = 1U << cParams->searchLog;

    /* dictMatchState view, only meaningful when dictMode == ZSTD_dictMatchState */
    const ZSTD_matchState_t* dms    = dictMode == ZSTD_dictMatchState ? ms->dictMatchState : NULL;
    const ZSTD_compressionParameters* const dmsCParams =
                                      dictMode == ZSTD_dictMatchState ? &dms->cParams : NULL;
    const BYTE* const dmsBase       = dictMode == ZSTD_dictMatchState ? dms->window.base : NULL;
    const BYTE* const dmsEnd        = dictMode == ZSTD_dictMatchState ? dms->window.nextSrc : NULL;
    U32         const dmsHighLimit  = dictMode == ZSTD_dictMatchState ? (U32)(dmsEnd - dmsBase) : 0;
    U32         const dmsLowLimit   = dictMode == ZSTD_dictMatchState ? dms->window.lowLimit : 0;
    U32         const dmsIndexDelta = dictMode == ZSTD_dictMatchState ? windowLow - dmsHighLimit : 0;
    U32         const dmsHashLog    = dictMode == ZSTD_dictMatchState ? dmsCParams->hashLog : hashLog;
    U32         const dmsBtLog      = dictMode == ZSTD_dictMatchState ? dmsCParams->chainLog - 1 : btLog;
    U32         const dmsBtMask     = dictMode == ZSTD_dictMatchState ? (1U << dmsBtLog) - 1 : 0;
    U32         const dmsBtLow      = dictMode == ZSTD_dictMatchState && dmsBtMask < dmsHighLimit - dmsLowLimit ? dmsHighLimit - dmsBtMask : dmsLowLimit;

    size_t bestLength = lengthToBeat-1;   /* only record matches strictly longer than this */
    DEBUGLOG(8, "ZSTD_insertBtAndGetAllMatches: current=%u", current);

    /* check repCode */
    assert(ll0 <= 1);   /* necessarily 1 or 0 */
    {   U32 const lastR = ZSTD_REP_NUM + ll0;
        U32 repCode;
        for (repCode = ll0; repCode < lastR; repCode++) {
            U32 const repOffset = (repCode==ZSTD_REP_NUM) ? (rep[0] - 1) : rep[repCode];
            U32 const repIndex = current - repOffset;
            U32 repLen = 0;
            assert(current >= dictLimit);
            if (repOffset-1 /* intentional overflow, discards 0 and -1 */ < current-dictLimit) {  /* equivalent to `current > repIndex >= dictLimit` */
                /* We must validate the repcode offset because when we're using a dictionary the
                 * valid offset range shrinks when the dictionary goes out of bounds.
                 */
                if ((repIndex >= windowLow) & (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(ip - repOffset, minMatch))) {
                    repLen = (U32)ZSTD_count(ip+minMatch, ip+minMatch-repOffset, iLimit) + minMatch;
                }
            } else {  /* repIndex < dictLimit || repIndex >= current */
                const BYTE* const repMatch = dictMode == ZSTD_dictMatchState ?
                                             dmsBase + repIndex - dmsIndexDelta :
                                             dictBase + repIndex;
                assert(current >= windowLow);
                if ( dictMode == ZSTD_extDict
                  && ( ((repOffset-1) /*intentional overflow*/ < current - windowLow)  /* equivalent to `current > repIndex >= windowLow` */
                     & (((U32)((dictLimit-1) - repIndex) >= 3) ) /* intentional overflow : do not test positions overlapping 2 memory segments */)
                  && (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(repMatch, minMatch)) ) {
                    repLen = (U32)ZSTD_count_2segments(ip+minMatch, repMatch+minMatch, iLimit, dictEnd, prefixStart) + minMatch;
                }
                if (dictMode == ZSTD_dictMatchState
                  && ( ((repOffset-1) /*intentional overflow*/ < current - (dmsLowLimit + dmsIndexDelta))  /* equivalent to `current > repIndex >= dmsLowLimit` */
                     & ((U32)((dictLimit-1) - repIndex) >= 3) ) /* intentional overflow : do not test positions overlapping 2 memory segments */
                  && (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(repMatch, minMatch)) ) {
                    repLen = (U32)ZSTD_count_2segments(ip+minMatch, repMatch+minMatch, iLimit, dmsEnd, prefixStart) + minMatch;
            }   }
            /* save longer solution */
            if (repLen > bestLength) {
                DEBUGLOG(8, "found repCode %u (ll0:%u, offset:%u) of length %u",
                            repCode, ll0, repOffset, repLen);
                bestLength = repLen;
                matches[mnum].off = repCode - ll0;   /* rep-match offset encoding */
                matches[mnum].len = (U32)repLen;
                mnum++;
                if ( (repLen > sufficient_len)
                   | (ip+repLen == iLimit) ) {  /* best possible */
                    return mnum;
    }   }   }   }

    /* HC3 match finder : only useful to catch 3-byte matches */
    if ((mls == 3) /*static*/ && (bestLength < mls)) {
        U32 const matchIndex3 = ZSTD_insertAndFindFirstIndexHash3(ms, nextToUpdate3, ip);
        if ((matchIndex3 >= matchLow)
          & (current - matchIndex3 < (1<<18)) /*heuristic : longer distance likely too expensive*/ ) {
            size_t mlen;
            if ((dictMode == ZSTD_noDict) /*static*/ || (dictMode == ZSTD_dictMatchState) /*static*/ || (matchIndex3 >= dictLimit)) {
                const BYTE* const match = base + matchIndex3;
                mlen = ZSTD_count(ip, match, iLimit);
            } else {
                const BYTE* const match = dictBase + matchIndex3;
                mlen = ZSTD_count_2segments(ip, match, iLimit, dictEnd, prefixStart);
            }

            /* save best solution */
            if (mlen >= mls /* == 3 > bestLength */) {
                DEBUGLOG(8, "found small match with hlog3, of length %u",
                            (U32)mlen);
                bestLength = mlen;
                assert(current > matchIndex3);
                assert(mnum==0);  /* no prior solution */
                matches[0].off = (current - matchIndex3) + ZSTD_REP_MOVE;
                matches[0].len = (U32)mlen;
                mnum = 1;
                if ( (mlen > sufficient_len) |
                     (ip+mlen == iLimit) ) {  /* best possible length */
                    ms->nextToUpdate = current+1;  /* skip insertion */
                    return 1;
        }   }   }
        /* no dictMatchState lookup: dicts don't have a populated HC3 table */
    }

    hashTable[h] = current;   /* Update Hash Table */

    /* binary tree traversal : inserts current while collecting matches */
    while (nbCompares-- && (matchIndex >= matchLow)) {
        U32* const nextPtr = bt + 2*(matchIndex & btMask);
        const BYTE* match;
        size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger);   /* guaranteed minimum nb of common bytes */
        assert(current > matchIndex);

        if ((dictMode == ZSTD_noDict) || (dictMode == ZSTD_dictMatchState) || (matchIndex+matchLength >= dictLimit)) {
            assert(matchIndex+matchLength >= dictLimit);  /* ensure the condition is correct when !extDict */
            match = base + matchIndex;
            if (matchIndex >= dictLimit) assert(memcmp(match, ip, matchLength) == 0);  /* ensure early section of match is equal as expected */
            matchLength += ZSTD_count(ip+matchLength, match+matchLength, iLimit);
        } else {
            match = dictBase + matchIndex;
            assert(memcmp(match, ip, matchLength) == 0);  /* ensure early section of match is equal as expected */
            matchLength += ZSTD_count_2segments(ip+matchLength, match+matchLength, iLimit, dictEnd, prefixStart);
            if (matchIndex+matchLength >= dictLimit)
                match = base + matchIndex;   /* prepare for match[matchLength] read */
        }

        if (matchLength > bestLength) {
            DEBUGLOG(8, "found match of length %u at distance %u (offCode=%u)",
                    (U32)matchLength, current - matchIndex, current - matchIndex + ZSTD_REP_MOVE);
            assert(matchEndIdx > matchIndex);
            if (matchLength > matchEndIdx - matchIndex)
                matchEndIdx = matchIndex + (U32)matchLength;
            bestLength = matchLength;
            matches[mnum].off = (current - matchIndex) + ZSTD_REP_MOVE;
            matches[mnum].len = (U32)matchLength;
            mnum++;
            if ( (matchLength > ZSTD_OPT_NUM)
               | (ip+matchLength == iLimit) /* equal : no way to know if inf or sup */) {
                if (dictMode == ZSTD_dictMatchState) nbCompares = 0; /* break should also skip searching dms */
                break; /* drop, to preserve bt consistency (miss a little bit of compression) */
            }
        }

        if (match[matchLength] < ip[matchLength]) {
            /* match smaller than current */
            *smallerPtr = matchIndex;             /* update smaller idx */
            commonLengthSmaller = matchLength;    /* all smaller will now have at least this guaranteed common length */
            if (matchIndex <= btLow) { smallerPtr=&dummy32; break; }   /* beyond tree size, stop the search */
            smallerPtr = nextPtr+1;               /* new candidate => larger than match, which was smaller than current */
            matchIndex = nextPtr[1];              /* new matchIndex, larger than previous, closer to current */
        } else {
            *largerPtr = matchIndex;
            commonLengthLarger = matchLength;
            if (matchIndex <= btLow) { largerPtr=&dummy32; break; }   /* beyond tree size, stop the search */
            largerPtr = nextPtr;
            matchIndex = nextPtr[0];
    }   }

    *smallerPtr = *largerPtr = 0;   /* terminate both dangling branches */

    /* read-only probe of the dictMatchState tree (never modified here) */
    if (dictMode == ZSTD_dictMatchState && nbCompares) {
        size_t const dmsH = ZSTD_hashPtr(ip, dmsHashLog, mls);
        U32 dictMatchIndex = dms->hashTable[dmsH];
        const U32* const dmsBt = dms->chainTable;
        commonLengthSmaller = commonLengthLarger = 0;
        while (nbCompares-- && (dictMatchIndex > dmsLowLimit)) {
            const U32* const nextPtr = dmsBt + 2*(dictMatchIndex & dmsBtMask);
            size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger);   /* guaranteed minimum nb of common bytes */
            const BYTE* match = dmsBase + dictMatchIndex;
            matchLength += ZSTD_count_2segments(ip+matchLength, match+matchLength, iLimit, dmsEnd, prefixStart);
            if (dictMatchIndex+matchLength >= dmsHighLimit)
                match = base + dictMatchIndex + dmsIndexDelta;   /* to prepare for next usage of match[matchLength] */

            if (matchLength > bestLength) {
                matchIndex = dictMatchIndex + dmsIndexDelta;
                DEBUGLOG(8, "found dms match of length %u at distance %u (offCode=%u)",
                        (U32)matchLength, current - matchIndex, current - matchIndex + ZSTD_REP_MOVE);
                if (matchLength > matchEndIdx - matchIndex)
                    matchEndIdx = matchIndex + (U32)matchLength;
                bestLength = matchLength;
                matches[mnum].off = (current - matchIndex) + ZSTD_REP_MOVE;
                matches[mnum].len = (U32)matchLength;
                mnum++;
                if ( (matchLength > ZSTD_OPT_NUM)
                   | (ip+matchLength == iLimit) /* equal : no way to know if inf or sup */) {
                    break;   /* drop, to guarantee consistency (miss a little bit of compression) */
                }
            }

            if (dictMatchIndex <= dmsBtLow) { break; }   /* beyond tree size, stop the search */
            if (match[matchLength] < ip[matchLength]) {
                commonLengthSmaller = matchLength;    /* all smaller will now have at least this guaranteed common length */
                dictMatchIndex = nextPtr[1];              /* new matchIndex larger than previous (closer to current) */
            } else {
                /* match is larger than current */
                commonLengthLarger = matchLength;
                dictMatchIndex = nextPtr[0];
            }
        }
    }

    assert(matchEndIdx > current+8);
    ms->nextToUpdate = matchEndIdx - 8;  /* skip repetitive patterns */
    return mnum;
}
int memcmp(void *Buffer1, void *Buffer2, ACPI_SIZE Count)
Definition: utclib.c:112
#define NULL
Definition: types.h:112
const ZSTD_matchState_t * dictMatchState
MEM_STATIC U32 ZSTD_getLowestMatchIndex(const ZSTD_matchState_t *ms, U32 current, unsigned windowLog)
#define ZSTD_REP_MOVE
static U32 ZSTD_insertAndFindFirstIndexHash3(ZSTD_matchState_t *ms, U32 *nextToUpdate3, const BYTE *const ip)
Definition: zstd_opt.c:341
MEM_STATIC U32 ZSTD_readMINMATCH(const void *memPtr, U32 length)
Definition: zstd_opt.c:325

Referenced by ZSTD_BtGetAllMatches().

◆ ZSTD_litLengthPrice()

/* ZSTD_litLengthPrice() :
 * Cost, in fractional bits (BITCOST_MULTIPLIER scale), of encoding a
 * literal-length value. Uses a static weight when no dynamic statistics
 * are available (zop_predef), otherwise the current LL frequency table. */
static U32 ZSTD_litLengthPrice(U32 const litLength, const optState_t* const optPtr, int optLevel)
{
    U32 code, extraBitsCost, symbolCost;

    if (optPtr->priceType == zop_predef)
        return WEIGHT(litLength, optLevel);

    /* dynamic statistics */
    code = ZSTD_LLcode(litLength);
    extraBitsCost = LL_bits[code] * BITCOST_MULTIPLIER;
    symbolCost = optPtr->litLengthSumBasePrice - WEIGHT(optPtr->litLengthFreq[code], optLevel);
    return extraBitsCost + symbolCost;
}
unsigned * litLengthFreq
MEM_STATIC U32 ZSTD_LLcode(U32 litLength)
static const U32 LL_bits[MaxLL+1]

Referenced by ZSTD_compressBlock_opt_generic().

◆ ZSTD_rawLiteralsCost()

/* ZSTD_rawLiteralsCost() :
 * Cost, in fractional bits, of emitting litLength raw literal bytes :
 * 8 bits/byte when literals are stored uncompressed, a flat 6 bits/byte
 * estimate before any statistics exist, otherwise a per-byte price
 * derived from the current literal frequency table. */
static U32 ZSTD_rawLiteralsCost(const BYTE* const literals, U32 const litLength,
                                const optState_t* const optPtr, int optLevel)
{
    if (litLength == 0) return 0;

    if (!ZSTD_compressedLiterals(optPtr))
        return (litLength << 3) * BITCOST_MULTIPLIER; /* Uncompressed - 8 bytes per literal. */

    if (optPtr->priceType == zop_predef)
        return (litLength*6) * BITCOST_MULTIPLIER; /* 6 bit per literal - no statistic used */

    /* dynamic statistics */
    {   U32 cost = litLength * optPtr->litSumBasePrice;
        U32 pos = 0;
        while (pos < litLength) {
            U32 const w = WEIGHT(optPtr->litFreq[literals[pos]], optLevel);
            assert(w <= optPtr->litSumBasePrice);   /* literal cost should never be negative */
            cost -= w;
            pos++;
        }
        return cost;
    }
}
GLsizei GLenum const GLvoid GLsizei GLenum GLbyte GLbyte GLbyte GLdouble GLdouble GLdouble GLfloat GLfloat GLfloat GLint GLint GLint GLshort GLshort GLshort GLubyte GLubyte GLubyte GLuint GLuint GLuint GLushort GLushort GLushort GLbyte GLbyte GLbyte GLbyte GLdouble GLdouble GLdouble GLdouble GLfloat GLfloat GLfloat GLfloat GLint GLint GLint GLint GLshort GLshort GLshort GLshort GLubyte GLubyte GLubyte GLubyte GLuint GLuint GLuint GLuint GLushort GLushort GLushort GLushort GLboolean const GLdouble const GLfloat const GLint const GLshort const GLbyte const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLdouble const GLfloat const GLfloat const GLint const GLint const GLshort const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort const GLdouble const GLfloat const GLint const GLshort GLenum GLenum GLenum GLfloat GLenum GLint GLenum GLenum GLenum GLfloat GLenum GLenum GLint GLenum GLfloat GLenum GLint GLint GLushort GLenum GLenum GLfloat GLenum GLenum GLint GLfloat const GLubyte GLenum GLenum GLenum const GLfloat GLenum GLenum const GLint GLenum GLint GLint GLsizei GLsizei GLint GLenum GLenum const GLvoid GLenum GLenum const GLfloat GLenum GLenum const GLint GLenum GLenum const GLdouble GLenum GLenum const GLfloat GLenum GLenum const GLint GLsizei GLuint GLfloat GLuint GLbitfield GLfloat GLint GLuint GLboolean GLenum GLfloat GLenum GLbitfield GLenum GLfloat GLfloat GLint GLint const GLfloat GLenum GLfloat GLfloat GLint GLint GLfloat GLfloat GLint GLint const GLfloat GLint GLfloat GLfloat GLint 
GLfloat GLfloat GLint GLfloat GLfloat const GLdouble * u
Definition: glfuncs.h:240
static int ZSTD_compressedLiterals(optState_t const *const optPtr)
Definition: zstd_opt.c:67

Referenced by ZSTD_compressBlock_opt_generic().

◆ ZSTD_readMINMATCH()

/* ZSTD_readMINMATCH() :
 * Read `length` bytes (3 or 4) starting at memPtr, packed into a U32 so
 * two minimum-match prefixes can be compared with a single `==`.
 * For length==3, the unwanted 4th byte is shifted out in an
 * endianness-aware way; any other length behaves as 4 (original default). */
MEM_STATIC U32 ZSTD_readMINMATCH(const void* memPtr, U32 length)
{
    U32 const raw = MEM_read32(memPtr);
    if (length == 3) {
        /* keep only the 3 meaningful bytes */
        return MEM_isLittleEndian() ? (raw << 8) : (raw >> 8);
    }
    return raw;   /* length == 4, and any other value (matches original default) */
}
MEM_STATIC U32 MEM_read32(const void *memPtr)
Definition: mem.h:242
MEM_STATIC unsigned MEM_isLittleEndian(void)
Definition: mem.h:186
GLuint GLsizei GLsizei * length
Definition: glext.h:6040

Referenced by ZSTD_insertBtAndGetAllMatches().

◆ ZSTD_rescaleFreqs()

/* ZSTD_rescaleFreqs() :
 * Prepare the dynamic statistics of optPtr for a new block.
 * - First block : init from the dictionary's entropy tables when present,
 *   otherwise from a histogram of the raw input (flat tables for codes);
 *   tiny inputs fall back to the static pre-defined price model.
 * - Subsequent blocks : re-use previous statistics, scaled down, so prices
 *   adapt progressively.
 * Finishes by refreshing the cached base prices.
 * FIX(review): this copy was missing the matchLengthSum downscale in the
 * "new block" branch (present in upstream zstd), which left matchLengthSum
 * inconsistent with the freshly downscaled matchLengthFreq table; restored. */
static void
ZSTD_rescaleFreqs(optState_t* const optPtr,
            const BYTE* const src, size_t const srcSize,
                  int const optLevel)
{
    int const compressedLiterals = ZSTD_compressedLiterals(optPtr);
    DEBUGLOG(5, "ZSTD_rescaleFreqs (srcSize=%u)", (unsigned)srcSize);
    optPtr->priceType = zop_dynamic;

    if (optPtr->litLengthSum == 0) {  /* first block : init */
        if (srcSize <= ZSTD_PREDEF_THRESHOLD) {  /* heuristic */
            DEBUGLOG(5, "(srcSize <= ZSTD_PREDEF_THRESHOLD) => zop_predef");
            optPtr->priceType = zop_predef;
        }

        assert(optPtr->symbolCosts != NULL);
        if (optPtr->symbolCosts->huf.repeatMode == HUF_repeat_valid) {
            /* huffman table presumed generated by dictionary */
            optPtr->priceType = zop_dynamic;

            if (compressedLiterals) {
                unsigned lit;
                assert(optPtr->litFreq != NULL);
                optPtr->litSum = 0;
                for (lit=0; lit<=MaxLit; lit++) {
                    U32 const scaleLog = 11;   /* scale to 2K */
                    U32 const bitCost = HUF_getNbBits(optPtr->symbolCosts->huf.CTable, lit);
                    assert(bitCost <= scaleLog);
                    optPtr->litFreq[lit] = bitCost ? 1 << (scaleLog-bitCost) : 1 /*minimum to calculate cost*/;
                    optPtr->litSum += optPtr->litFreq[lit];
            }   }

            {   unsigned ll;
                FSE_CState_t llstate;
                FSE_initCState(&llstate, optPtr->symbolCosts->fse.litlengthCTable);
                optPtr->litLengthSum = 0;
                for (ll=0; ll<=MaxLL; ll++) {
                    U32 const scaleLog = 10;   /* scale to 1K */
                    U32 const bitCost = FSE_getMaxNbBits(llstate.symbolTT, ll);
                    assert(bitCost < scaleLog);
                    optPtr->litLengthFreq[ll] = bitCost ? 1 << (scaleLog-bitCost) : 1 /*minimum to calculate cost*/;
                    optPtr->litLengthSum += optPtr->litLengthFreq[ll];
            }   }

            {   unsigned ml;
                FSE_CState_t mlstate;
                FSE_initCState(&mlstate, optPtr->symbolCosts->fse.matchlengthCTable);
                optPtr->matchLengthSum = 0;
                for (ml=0; ml<=MaxML; ml++) {
                    U32 const scaleLog = 10;
                    U32 const bitCost = FSE_getMaxNbBits(mlstate.symbolTT, ml);
                    assert(bitCost < scaleLog);
                    optPtr->matchLengthFreq[ml] = bitCost ? 1 << (scaleLog-bitCost) : 1 /*minimum to calculate cost*/;
                    optPtr->matchLengthSum += optPtr->matchLengthFreq[ml];
            }   }

            {   unsigned of;
                FSE_CState_t ofstate;
                FSE_initCState(&ofstate, optPtr->symbolCosts->fse.offcodeCTable);
                optPtr->offCodeSum = 0;
                for (of=0; of<=MaxOff; of++) {
                    U32 const scaleLog = 10;
                    U32 const bitCost = FSE_getMaxNbBits(ofstate.symbolTT, of);
                    assert(bitCost < scaleLog);
                    optPtr->offCodeFreq[of] = bitCost ? 1 << (scaleLog-bitCost) : 1 /*minimum to calculate cost*/;
                    optPtr->offCodeSum += optPtr->offCodeFreq[of];
            }   }

        } else {  /* not a dictionary */

            assert(optPtr->litFreq != NULL);
            if (compressedLiterals) {
                unsigned lit = MaxLit;
                HIST_count_simple(optPtr->litFreq, &lit, src, srcSize);   /* use raw first block to init statistics */
                optPtr->litSum = ZSTD_downscaleStat(optPtr->litFreq, MaxLit, 1);
            }

            {   unsigned ll;
                for (ll=0; ll<=MaxLL; ll++)
                    optPtr->litLengthFreq[ll] = 1;
            }
            optPtr->litLengthSum = MaxLL+1;

            {   unsigned ml;
                for (ml=0; ml<=MaxML; ml++)
                    optPtr->matchLengthFreq[ml] = 1;
            }
            optPtr->matchLengthSum = MaxML+1;

            {   unsigned of;
                for (of=0; of<=MaxOff; of++)
                    optPtr->offCodeFreq[of] = 1;
            }
            optPtr->offCodeSum = MaxOff+1;

        }

    } else {   /* new block : re-use previous statistics, scaled down */

        if (compressedLiterals)
            optPtr->litSum = ZSTD_downscaleStat(optPtr->litFreq, MaxLit, 1);
        optPtr->litLengthSum = ZSTD_downscaleStat(optPtr->litLengthFreq, MaxLL, 0);
        optPtr->matchLengthSum = ZSTD_downscaleStat(optPtr->matchLengthFreq, MaxML, 0);   /* restored : was missing */
        optPtr->offCodeSum = ZSTD_downscaleStat(optPtr->offCodeFreq, MaxOff, 0);
    }

    ZSTD_setBasePrices(optPtr, optLevel);
}
w ll
Definition: byte_order.h:167
unsigned HIST_count_simple(unsigned *count, unsigned *maxSymbolValuePtr, const void *src, size_t srcSize)
Definition: hist.c:29
U32 HUF_getNbBits(const void *symbolTable, U32 symbolValue)
Definition: huf_compress.c:200
FSE_CTable offcodeCTable[FSE_CTABLE_SIZE_U32(OffFSELog, MaxOff)]
FSE_CTable litlengthCTable[FSE_CTABLE_SIZE_U32(LLFSELog, MaxLL)]
FSE_CTable matchlengthCTable[FSE_CTABLE_SIZE_U32(MLFSELog, MaxML)]
U32 CTable[HUF_CTABLE_SIZE_U32(255)]
const ZSTD_entropyCTables_t * symbolCosts
#define MaxLit
#define MaxLL
#define MaxML
#define MaxOff
static U32 ZSTD_downscaleStat(unsigned *table, U32 lastEltIndex, int malus)
Definition: zstd_opt.c:85

Referenced by ZSTD_compressBlock_opt_generic().

◆ ZSTD_setBasePrices()

/* ZSTD_setBasePrices() :
 * Refresh the cached "base price" of each symbol family from its current
 * frequency-table total. The literal base price is only meaningful when
 * literals are actually entropy-compressed. */
static void ZSTD_setBasePrices(optState_t* optPtr, int optLevel)
{
    int const hasCompressedLiterals = ZSTD_compressedLiterals(optPtr);
    if (hasCompressedLiterals) {
        optPtr->litSumBasePrice = WEIGHT(optPtr->litSum, optLevel);
    }
    /* the three code families are independent; order is irrelevant */
    optPtr->offCodeSumBasePrice     = WEIGHT(optPtr->offCodeSum, optLevel);
    optPtr->matchLengthSumBasePrice = WEIGHT(optPtr->matchLengthSum, optLevel);
    optPtr->litLengthSumBasePrice   = WEIGHT(optPtr->litLengthSum, optLevel);
}

Referenced by ZSTD_compressBlock_opt_generic(), and ZSTD_rescaleFreqs().

◆ ZSTD_totalLen()

/* Total forward advance (literal run + match length) of one optimal-parser solution. */
static U32 ZSTD_totalLen(ZSTD_optimal_t sol)
{
    U32 const total = sol.mlen + sol.litlen;
    return total;
}

Referenced by ZSTD_compressBlock_opt_generic().

◆ ZSTD_updateStats()

/* ZSTD_updateStats() :
 * Fold one emitted sequence (literal run + match) into the dynamic
 * frequency tables that drive price estimation for subsequent positions. */
static void ZSTD_updateStats(optState_t* const optPtr,
                             U32 litLength, const BYTE* literals,
                             U32 offsetCode, U32 matchLength)
{
    /* literals */
    if (ZSTD_compressedLiterals(optPtr)) {
        U32 pos;
        for (pos = 0; pos < litLength; pos++)
            optPtr->litFreq[literals[pos]] += ZSTD_LITFREQ_ADD;
        optPtr->litSum += litLength * ZSTD_LITFREQ_ADD;
    }

    /* literal Length */
    optPtr->litLengthFreq[ZSTD_LLcode(litLength)]++;
    optPtr->litLengthSum++;

    /* match offset code (0-2=>repCode; 3+=>offset+2) */
    {   U32 const ofCode = ZSTD_highbit32(offsetCode + 1);
        assert(ofCode <= MaxOff);
        optPtr->offCodeFreq[ofCode]++;
        optPtr->offCodeSum++;
    }

    /* match Length */
    optPtr->matchLengthFreq[ZSTD_MLcode(matchLength - MINMATCH)]++;
    optPtr->matchLengthSum++;
}
#define ZSTD_LITFREQ_ADD
Definition: zstd_opt.c:16

Referenced by ZSTD_compressBlock_opt_generic().

◆ ZSTD_updateTree()

/* ZSTD_updateTree() :
 * Public wrapper : insert all positions from ms->nextToUpdate up to ip into
 * the binary tree, using the configured minMatch and no external dictionary
 * (ZSTD_noDict). Used when loading dictionary content. */
void ZSTD_updateTree(ZSTD_matchState_t* ms, const BYTE* ip, const BYTE* iend) {
    ZSTD_updateTree_internal(ms, ip, iend, ms->cParams.minMatch, ZSTD_noDict);
}

Referenced by ZSTD_loadDictionaryContent().

◆ ZSTD_updateTree_internal()

/* ZSTD_updateTree_internal() :
 * Insert every position in [ms->nextToUpdate, ip) into the binary tree.
 * ZSTD_insertBt1() may report multiple positions inserted at once (skipping
 * repetitive regions), hence the variable stride. */
FORCE_INLINE_TEMPLATE
void ZSTD_updateTree_internal(ZSTD_matchState_t* ms,
                              const BYTE* const ip, const BYTE* const iend,
                              const U32 mls, const ZSTD_dictMode_e dictMode)
{
    const BYTE* const windowBase = ms->window.base;
    U32 const endIdx = (U32)(ip - windowBase);
    U32 cur;
    DEBUGLOG(6, "ZSTD_updateTree_internal, from %u to %u (dictMode:%u)",
                ms->nextToUpdate, endIdx, dictMode);

    for (cur = ms->nextToUpdate; cur < endIdx; ) {
        U32 const step = ZSTD_insertBt1(ms, windowBase + cur, iend, mls, dictMode == ZSTD_extDict);
        assert(cur < (U32)(cur + step));   /* guaranteed progress, no wrap */
        cur += step;
    }
    assert((size_t)(ip - windowBase) <= (size_t)(U32)(-1));
    assert((size_t)(iend - windowBase) <= (size_t)(U32)(-1));
    ms->nextToUpdate = endIdx;
}
static U32 ZSTD_insertBt1(ZSTD_matchState_t *ms, const BYTE *const ip, const BYTE *const iend, U32 const mls, const int extDict)
Definition: zstd_opt.c:369

Referenced by ZSTD_BtGetAllMatches(), and ZSTD_updateTree().

◆ ZSTD_upscaleStat()

/* ZSTD_upscaleStat() :
 * Multiply every entry of table[0..lastEltIndex] by 2^(ZSTD_FREQ_DIV+bonus),
 * then subtract 1 from each entry (unsigned wrap-around preserved for zero
 * entries, as in the original). @return : new total of the table. */
static U32 ZSTD_upscaleStat(unsigned* table, U32 lastEltIndex, int bonus)
{
    int const shift = ZSTD_FREQ_DIV + bonus;
    U32 total = 0;
    U32 idx;
    assert(shift >= 0);
    for (idx = 0; idx <= lastEltIndex; idx++) {
        table[idx] = (table[idx] << shift) - 1;
        total += table[idx];
    }
    return total;
}

Referenced by ZSTD_upscaleStats().

◆ ZSTD_upscaleStats()

/* ZSTD_upscaleStats() :
 * Amplify all frequency tables after a statistics-gathering pass
 * (see ZSTD_initStats_ultra), so the collected statistics carry more
 * weight than the default increments of the second pass.
 * FIX(review): this copy was missing the matchLengthSum upscale (present in
 * upstream zstd), leaving matchLengthSum inconsistent with the upscaled
 * matchLengthFreq table; restored, mirroring the other three families. */
MEM_STATIC void ZSTD_upscaleStats(optState_t* optPtr)
{
    if (ZSTD_compressedLiterals(optPtr))
        optPtr->litSum = ZSTD_upscaleStat(optPtr->litFreq, MaxLit, 0);
    optPtr->litLengthSum = ZSTD_upscaleStat(optPtr->litLengthFreq, MaxLL, 0);
    optPtr->matchLengthSum = ZSTD_upscaleStat(optPtr->matchLengthFreq, MaxML, 0);   /* restored : was missing */
    optPtr->offCodeSum = ZSTD_upscaleStat(optPtr->offCodeFreq, MaxOff, 0);
}
static U32 ZSTD_upscaleStat(unsigned *table, U32 lastEltIndex, int bonus)
Definition: zstd_opt.c:1079

Referenced by ZSTD_initStats_ultra().