Lines Matching refs:matchIndex

45         U32    const matchIndex = hashTable[h];
52 *nextCandidatePtr = matchIndex; /* update BT like a chain */
84 U32 matchIndex = *smallerPtr; /* this candidate is unsorted: the next sorted candidate is reached through *smallerPtr, while *largerPtr holds the previous unsorted candidate (already saved, so it can be overwritten) */
96 for (; nbCompares && (matchIndex > windowLow); --nbCompares) {
97 U32* const nextPtr = bt + 2*(matchIndex & btMask);
99 assert(matchIndex < curr);
105 || (matchIndex+matchLength >= dictLimit) /* both in current segment */
108 || (matchIndex+matchLength >= dictLimit)) ?
110 assert( (matchIndex+matchLength >= dictLimit) /* might be wrong if extDict is incorrectly set to 0 */
112 match = mBase + matchIndex;
115 match = dictBase + matchIndex;
117 if (matchIndex+matchLength >= dictLimit)
118 match = base + matchIndex; /* preparation for next read of match[matchLength] */
122 curr, matchIndex, (U32)matchLength);
130 *smallerPtr = matchIndex; /* update smaller idx */
132 if (matchIndex <= btLow) { smallerPtr=&dummy32; break; } /* beyond tree size, stop searching */
134 matchIndex, btLow, nextPtr[1]);
136 matchIndex = nextPtr[1]; /* new matchIndex, larger than previous and closer to current */
139 *largerPtr = matchIndex;
141 if (matchIndex <= btLow) { largerPtr=&dummy32; break; } /* beyond tree size, stop searching */
143 matchIndex, btLow, nextPtr[0]);
145 matchIndex = nextPtr[0];
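
The lines above (file lines 96-145) show the sorted binary-tree descent: each indexed position owns a pair of U32 slots in bt[], and two "hook" pointers (smallerPtr / largerPtr) are rewired as candidates are visited. Below is a minimal, self-contained sketch of that pattern; it assumes everything lives in one prefix buffer and leaves out the extDict path, the commonLength optimization, and the match-quality bookkeeping, so it is illustrative rather than the actual implementation.

    #include <stdint.h>

    typedef uint32_t U32;
    typedef uint8_t  BYTE;

    /* length of the common prefix of a and b, with b bounded by end (a sits at a lower address) */
    static size_t common_prefix(const BYTE* a, const BYTE* b, const BYTE* end)
    {
        size_t n = 0;
        while ((b + n < end) && (a[n] == b[n])) n++;
        return n;
    }

    /* Insert position `curr` into the tree whose current root candidate is `matchIndex`. */
    static void bt_insert_sketch(U32* bt, U32 btMask, U32 btLow, U32 windowLow,
                                 U32 nbCompares, const BYTE* base, const BYTE* iend,
                                 U32 curr, U32 matchIndex)
    {
        const BYTE* const ip = base + curr;
        U32 dummy32;   /* sink for writes once the descent leaves the tree */
        U32* smallerPtr = bt + 2*(curr & btMask);   /* hook for candidates smaller than curr */
        U32* largerPtr  = smallerPtr + 1;           /* hook for candidates larger than curr */

        for (; nbCompares && (matchIndex > windowLow); --nbCompares) {
            U32* const nextPtr = bt + 2*(matchIndex & btMask);
            const BYTE* const match = base + matchIndex;
            size_t const matchLength = common_prefix(match, ip, iend);

            if (ip + matchLength == iend) break;     /* equal up to end of input: stop, keep tree consistent */

            if (match[matchLength] < ip[matchLength]) {
                *smallerPtr = matchIndex;            /* candidate hangs on the "smaller" side */
                if (matchIndex <= btLow) { smallerPtr = &dummy32; break; }   /* beyond tree size, stop */
                smallerPtr = nextPtr + 1;            /* next candidate will be larger than this one */
                matchIndex = nextPtr[1];
            } else {
                *largerPtr = matchIndex;             /* candidate hangs on the "larger" side */
                if (matchIndex <= btLow) { largerPtr = &dummy32; break; }    /* beyond tree size, stop */
                largerPtr = nextPtr;                 /* next candidate will be smaller than this one */
                matchIndex = nextPtr[0];
            }
        }
        *smallerPtr = *largerPtr = 0;                /* close both branches */
    }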
197 U32 matchIndex = dictMatchIndex + dictIndexDelta;
198 if ( (4*(int)(matchLength-bestLength)) > (int)(ZSTD_highbit32(curr-matchIndex+1) - ZSTD_highbit32((U32)offsetPtr[0]+1)) ) {
199 DEBUGLOG(9, "ZSTD_DUBT_findBetterDictMatch(%u) : found better match length %u -> %u and offsetCode %u -> %u (dictMatchIndex %u, matchIndex %u)",
200 curr, (U32)bestLength, (U32)matchLength, (U32)*offsetPtr, STORE_OFFSET(curr - matchIndex), dictMatchIndex, matchIndex);
201 bestLength = matchLength, *offsetPtr = STORE_OFFSET(curr - matchIndex);
211 dictMatchIndex = nextPtr[1]; /* new matchIndex larger than previous (closer to current) */
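
Lines 198 and 330 apply the same acceptance heuristic: a longer candidate only replaces the current best if the extra length pays for the extra cost of its (usually larger) offset, estimated as the difference of log2 values. A hedged sketch, where highbit32 stands in for ZSTD_highbit32 and bestOffset stands in for the stored offset code kept in offsetPtr[0]:

    #include <stdint.h>

    typedef uint32_t U32;

    /* stand-in for ZSTD_highbit32: position of the highest set bit (floor of log2) */
    static U32 highbit32(U32 v)
    {
        U32 r = 0;
        while (v >>= 1) r++;
        return r;
    }

    /* 4 * (extra length in bytes) must exceed the extra offset cost in bits */
    static int candidate_is_better(size_t matchLength, size_t bestLength,
                                   U32 curr, U32 matchIndex, U32 bestOffset)
    {
        return (4 * (int)(matchLength - bestLength))
             > (int)(highbit32(curr - matchIndex + 1) - highbit32(bestOffset + 1));
    }

Concretely, a candidate that is one byte longer is accepted only while its distance stays within roughly 8x-16x of the current best offset (the exact bound depends on the flooring inside highbit32).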
241 U32 matchIndex = hashTable[h];
253 U32* nextCandidate = bt + 2*(matchIndex&btMask);
254 U32* unsortedMark = bt + 2*(matchIndex&btMask) + 1;
264 while ( (matchIndex > unsortLimit)
268 matchIndex);
270 previousCandidate = matchIndex;
271 matchIndex = *nextCandidate;
272 nextCandidate = bt + 2*(matchIndex&btMask);
273 unsortedMark = bt + 2*(matchIndex&btMask) + 1;
279 if ( (matchIndex > unsortLimit)
282 matchIndex);
287 matchIndex = previousCandidate;
288 while (matchIndex) { /* will end on matchIndex == 0 */
289 U32* const nextCandidateIdxPtr = bt + 2*(matchIndex&btMask) + 1;
291 ZSTD_insertDUBT1(ms, matchIndex, iend,
293 matchIndex = nextCandidateIdx;
309 matchIndex = hashTable[h];
312 for (; nbCompares && (matchIndex > windowLow); --nbCompares) {
313 U32* const nextPtr = bt + 2*(matchIndex & btMask);
317 if ((dictMode != ZSTD_extDict) || (matchIndex+matchLength >= dictLimit)) {
318 match = base + matchIndex;
321 match = dictBase + matchIndex;
323 if (matchIndex+matchLength >= dictLimit)
324 match = base + matchIndex; /* to prepare for next usage of match[matchLength] */
328 if (matchLength > matchEndIdx - matchIndex)
329 matchEndIdx = matchIndex + (U32)matchLength;
330 if ( (4*(int)(matchLength-bestLength)) > (int)(ZSTD_highbit32(curr-matchIndex+1) - ZSTD_highbit32((U32)offsetPtr[0]+1)) )
331 bestLength = matchLength, *offsetPtr = STORE_OFFSET(curr - matchIndex);
344 *smallerPtr = matchIndex; /* update smaller idx */
346 if (matchIndex <= btLow) { smallerPtr=&dummy32; break; } /* beyond tree size, stop the search */
348 matchIndex = nextPtr[1]; /* new matchIndex larger than previous (closer to current) */
351 *largerPtr = matchIndex;
353 if (matchIndex <= btLow) { largerPtr=&dummy32; break; } /* beyond tree size, stop the search */
355 matchIndex = nextPtr[0];
529 U32 matchIndex;
545 matchIndex = dms->hashTable[ddsIdx + ddsAttempt];
546 match = ddsBase + matchIndex;
548 if (!matchIndex) {
554 assert(matchIndex >= ddsLowestIndex);
557 /* assumption : matchIndex <= dictLimit-4 (by table construction) */
564 *offsetPtr = STORE_OFFSET(curr - (matchIndex + ddsIndexDelta));
587 matchIndex = dms->chainTable[chainIndex];
588 match = ddsBase + matchIndex;
591 assert(matchIndex >= ddsLowestIndex);
594 /* assumption : matchIndex <= dictLimit-4 (by table construction) */
601 *offsetPtr = STORE_OFFSET(curr - (matchIndex + ddsIndexDelta));
679 U32 matchIndex;
687 matchIndex = ZSTD_insertAndFindFirstIndex_internal(ms, cParams, ip, mls);
689 for ( ; (matchIndex>=lowLimit) & (nbAttempts>0) ; nbAttempts--) {
691 if ((dictMode != ZSTD_extDict) || matchIndex >= dictLimit) {
692 const BYTE* const match = base + matchIndex;
693 assert(matchIndex >= dictLimit); /* ensures this is true if dictMode != ZSTD_extDict */
697 const BYTE* const match = dictBase + matchIndex;
699 if (MEM_read32(match) == MEM_read32(ip)) /* assumption : matchIndex <= dictLimit-4 (by table construction) */
706 *offsetPtr = STORE_OFFSET(curr - matchIndex);
710 if (matchIndex <= minChain) break;
711 matchIndex = NEXT_IN_CHAIN(matchIndex, chainMask);
729 matchIndex = dms->hashTable[ZSTD_hashPtr(ip, dms->cParams.hashLog, mls)];
731 for ( ; (matchIndex>=dmsLowestIndex) & (nbAttempts>0) ; nbAttempts--) {
733 const BYTE* const match = dmsBase + matchIndex;
735 if (MEM_read32(match) == MEM_read32(ip)) /* assumption : matchIndex <= dictLimit-4 (by table construction) */
741 assert(curr > matchIndex + dmsIndexDelta);
742 *offsetPtr = STORE_OFFSET(curr - (matchIndex + dmsIndexDelta));
746 if (matchIndex <= dmsMinChain) break;
748 matchIndex = dmsChainTable[matchIndex & dmsChainMask];
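
Lines 679-748 belong to the hash-chain searcher: matchIndex starts at the newest position hashed like ip, a 4-byte probe filters candidates, and the chain table links each position to the previous one with the same hash until minChain or the attempt budget runs out. A simplified, prefix-only sketch (no extDict or dictMatchState paths, and a raw distance instead of a stored offset code):

    #include <stdint.h>
    #include <string.h>

    typedef uint32_t U32;
    typedef uint8_t  BYTE;

    #define NEXT_IN_CHAIN(d, mask)   chainTable[(d) & (mask)]

    static U32 read32(const BYTE* p) { U32 v; memcpy(&v, p, sizeof v); return v; }

    /* assumes ip <= iend - 4, as the real searcher guarantees via its input limit */
    static size_t chain_search_sketch(const U32* chainTable, U32 chainMask,
                                      const BYTE* base, const BYTE* ip, const BYTE* iend,
                                      U32 curr, U32 matchIndex, U32 lowLimit,
                                      U32 minChain, U32 nbAttempts, U32* offsetFound)
    {
        size_t bestLength = 0;
        for ( ; (matchIndex >= lowLimit) & (nbAttempts > 0); nbAttempts--) {
            const BYTE* const match = base + matchIndex;
            if (read32(match) == read32(ip)) {        /* cheap 4-byte filter before full extension */
                size_t len = 4;
                while ((ip + len < iend) && (match[len] == ip[len])) len++;
                if (len > bestLength) {
                    bestLength = len;
                    *offsetFound = curr - matchIndex; /* raw distance; zstd stores an offset code here */
                }
            }
            if (matchIndex <= minChain) break;        /* chain is not maintained below this index */
            matchIndex = NEXT_IN_CHAIN(matchIndex, chainMask);
        }
        return bestLength;
    }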
1197 U32 const matchIndex = row[matchPos];
1199 if (matchIndex < lowLimit)
1201 if ((dictMode != ZSTD_extDict) || matchIndex >= dictLimit) {
1202 PREFETCH_L1(base + matchIndex);
1204 PREFETCH_L1(dictBase + matchIndex);
1206 matchBuffer[numMatches++] = matchIndex;
1219 U32 const matchIndex = matchBuffer[currMatch];
1221 assert(matchIndex < curr);
1222 assert(matchIndex >= lowLimit);
1224 if ((dictMode != ZSTD_extDict) || matchIndex >= dictLimit) {
1225 const BYTE* const match = base + matchIndex;
1226 assert(matchIndex >= dictLimit); /* ensures this is true if dictMode != ZSTD_extDict */
1230 const BYTE* const match = dictBase + matchIndex;
1232 if (MEM_read32(match) == MEM_read32(ip)) /* assumption : matchIndex <= dictLimit-4 (by table construction) */
1239 *offsetPtr = STORE_OFFSET(curr - matchIndex);
1265 U32 const matchIndex = dmsRow[matchPos];
1266 if (matchIndex < dmsLowestIndex)
1268 PREFETCH_L1(dmsBase + matchIndex);
1269 matchBuffer[numMatches++] = matchIndex;
1274 U32 const matchIndex = matchBuffer[currMatch];
1276 assert(matchIndex >= dmsLowestIndex);
1277 assert(matchIndex < curr);
1279 { const BYTE* const match = dmsBase + matchIndex;
1287 assert(curr > matchIndex + dmsIndexDelta);
1288 *offsetPtr = STORE_OFFSET(curr - (matchIndex + dmsIndexDelta));
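
Lines 1197-1288 come from the row-based matcher, which works in two passes: a first pass over the row validates candidate indices, prefetches their data, and buffers them in matchBuffer; a second pass does the actual byte comparisons. A reduced sketch of the prefix-window case (tag filtering, extDict and dictMatchState handling omitted; PREFETCH_L1 modeled with the GCC/Clang builtin):

    #include <stdint.h>
    #include <string.h>

    typedef uint32_t U32;
    typedef uint8_t  BYTE;

    #define ROW_ENTRIES    16                      /* illustrative row width */
    #define PREFETCH_L1(p) __builtin_prefetch(p)   /* GCC/Clang builtin; a no-op also works */

    static U32 read32(const BYTE* p) { U32 v; memcpy(&v, p, sizeof v); return v; }

    /* assumes ip <= iend - 4; returns the best length found, with its index in *bestIndex */
    static size_t row_search_sketch(const U32* row, const BYTE* base,
                                    const BYTE* ip, const BYTE* iend,
                                    U32 lowLimit, U32* bestIndex)
    {
        U32 matchBuffer[ROW_ENTRIES];
        U32 numMatches = 0;
        size_t bestLength = 0;
        U32 matchPos, currMatch;

        /* pass 1: keep only valid indices and warm the cache */
        for (matchPos = 0; matchPos < ROW_ENTRIES; matchPos++) {
            U32 const matchIndex = row[matchPos];
            if (matchIndex < lowLimit) break;
            PREFETCH_L1(base + matchIndex);
            matchBuffer[numMatches++] = matchIndex;
        }

        /* pass 2: byte comparisons on the buffered candidates only */
        for (currMatch = 0; currMatch < numMatches; currMatch++) {
            U32 const matchIndex = matchBuffer[currMatch];
            const BYTE* const match = base + matchIndex;
            if (read32(match) == read32(ip)) {
                size_t len = 4;
                while ((ip + len < iend) && (match[len] == ip[len])) len++;
                if (len > bestLength) { bestLength = len; *bestIndex = matchIndex; }
            }
        }
        return bestLength;
    }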
1663 U32 const matchIndex = (U32)((size_t)(start-base) - STORED_OFFSET(offcode));
1664 const BYTE* match = (matchIndex < prefixLowestIndex) ? dictBase + matchIndex - dictIndexDelta : base + matchIndex;
1665 const BYTE* const mStart = (matchIndex < prefixLowestIndex) ? dictLowest : prefixLowest;
2005 U32 const matchIndex = (U32)((size_t)(start-base) - STORED_OFFSET(offcode));
2006 const BYTE* match = (matchIndex < dictLimit) ? dictBase + matchIndex : base + matchIndex;
2007 const BYTE* const mStart = (matchIndex < dictLimit) ? dictStart : prefixStart;
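
The last two groups (1663-1665 and 2005-2007) are the lazy parsers turning a found offset back into an absolute matchIndex so the match can be extended backwards; depending on whether that index falls before the prefix, the match bytes are read from the dictionary buffer (bounded by dictStart / dictLowest) or from the current window (bounded by prefixStart / prefixLowest). A hedged sketch of the extDict variant, with STORED_OFFSET decoding replaced by a plain distance:

    #include <stdint.h>

    typedef uint32_t U32;
    typedef uint8_t  BYTE;

    typedef struct {
        const BYTE* match;    /* first byte of the match candidate */
        const BYTE* mStart;   /* lowest address the match may extend back to */
    } MatchBounds;

    static MatchBounds locate_match_sketch(const BYTE* base, const BYTE* dictBase,
                                           const BYTE* prefixStart, const BYTE* dictStart,
                                           U32 dictLimit, const BYTE* start, U32 offset)
    {
        U32 const matchIndex = (U32)((size_t)(start - base) - offset);
        MatchBounds b;
        if (matchIndex < dictLimit) {   /* candidate lives in the extDict segment */
            b.match  = dictBase + matchIndex;
            b.mStart = dictStart;
        } else {                        /* candidate lives in the current prefix */
            b.match  = base + matchIndex;
            b.mStart = prefixStart;
        }
        return b;
    }

The dictMatchState variant at line 1664 is the same idea, except the dictionary-side read also subtracts dictIndexDelta so the index lands in the external dictionary's own address space.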