/kernel/linux/linux-6.6/lib/zstd/compress/

zstd_double_fast.c
     26  const BYTE* const iend = ((const BYTE*)end) - HASH_READ_SIZE;   in ZSTD_fillDoubleHashTable()
     33  for (; ip + fastHashFillStep - 1 <= iend; ip += fastHashFillStep) {   in ZSTD_fillDoubleHashTable()
     67  const BYTE* const iend = istart + srcSize;   in ZSTD_compressBlock_doubleFast_noDict_generic()
     68  const BYTE* const ilimit = iend - HASH_READ_SIZE;   in ZSTD_compressBlock_doubleFast_noDict_generic()
    132  mLength = ZSTD_count(ip+1+4, ip+1+4-offset_1, iend) + 4;   in ZSTD_compressBlock_doubleFast_noDict_generic()
    134  ZSTD_storeSeq(seqStore, (size_t)(ip-anchor), anchor, iend, STORE_REPCODE_1, mLength);   in ZSTD_compressBlock_doubleFast_noDict_generic()
    143  mLength = ZSTD_count(ip+8, matchl0+8, iend) + 8;   in ZSTD_compressBlock_doubleFast_noDict_generic()
    183  return (size_t)(iend - anchor);   in ZSTD_compressBlock_doubleFast_noDict_generic()
    191  mLength = ZSTD_count(ip+8, matchl1+8, iend) + 8;   in ZSTD_compressBlock_doubleFast_noDict_generic()
    199  mLength = ZSTD_count(ip+4, matchs0+4, iend) ...   in ZSTD_compressBlock_doubleFast_noDict_generic()
    275  const BYTE* const iend = istart + srcSize;   in ZSTD_compressBlock_doubleFast_dictMatchState_generic()
    543  const BYTE* const iend = istart + srcSize;   in ZSTD_compressBlock_doubleFast_extDict_generic()
    ... (more matches not shown)

zstd_fast.c
     25  const BYTE* const iend = ((const BYTE*)end) - HASH_READ_SIZE;   in ZSTD_fillHashTable()
     31  for ( ; ip + fastHashFillStep < iend + 2; ip += fastHashFillStep) {   in ZSTD_fillHashTable()
    108  const BYTE* const iend = istart + srcSize;   in ZSTD_compressBlock_fast_noDict_generic()
    109  const BYTE* const ilimit = iend - HASH_READ_SIZE;   in ZSTD_compressBlock_fast_noDict_generic()
    262  return (size_t)(iend - anchor);   in ZSTD_compressBlock_fast_noDict_generic()
    283  mLength += ZSTD_count(ip0 + mLength, match0 + mLength, iend);   in ZSTD_compressBlock_fast_noDict_generic()
    285  ZSTD_storeSeq(seqStore, (size_t)(ip0 - anchor), anchor, iend, offcode, mLength);   in ZSTD_compressBlock_fast_noDict_generic()
    299  hashTable[ZSTD_hashPtr(base+current0+2, hlog, mls)] = current0+2; /* here because current+2 could be > iend-8 */   in ZSTD_compressBlock_fast_noDict_generic()
    305  size_t const rLength = ZSTD_count(ip0+4, ip0+4-rep_offset2, iend) + 4;   in ZSTD_compressBlock_fast_noDict_generic()
    309  ZSTD_storeSeq(seqStore, 0 /*litLen*/, anchor, iend, STORE_REPCODE_...   in ZSTD_compressBlock_fast_noDict_generic()
    387  const BYTE* const iend = istart + srcSize;   in ZSTD_compressBlock_fast_dictMatchState_generic()
    570  const BYTE* const iend = istart + srcSize;   in ZSTD_compressBlock_fast_extDict_generic()
    ... (more matches not shown)

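The fast and double-fast hits above all lean on the same boundary discipline: `iend` is one past the last input byte, `ilimit = iend - HASH_READ_SIZE` keeps the wide hash reads in bounds, and every `ZSTD_count` call is capped at `iend`. A minimal, self-contained sketch of that idiom; `count_match` and `HASH_READ_BYTES` are illustrative stand-ins, not the zstd internals:

```c
#include <stddef.h>
#include <stdint.h>

#define HASH_READ_BYTES 8   /* illustrative stand-in for zstd's HASH_READ_SIZE */

/* Count how many leading bytes of ip and match agree, never reading past iend. */
static size_t count_match(const uint8_t *ip, const uint8_t *match, const uint8_t *iend)
{
    size_t len = 0;
    while (ip + len < iend && ip[len] == match[len])
        len++;
    return len;
}

/* Skeleton of a block scan bounded the way the fast loops above are. */
static void scan_block(const uint8_t *istart, size_t srcSize)
{
    const uint8_t *ip = istart;
    const uint8_t *const iend = istart + srcSize;
    const uint8_t *ilimit;

    if (srcSize < HASH_READ_BYTES)
        return;                                  /* too small for a full hash read */
    ilimit = iend - HASH_READ_BYTES;             /* last position where 8 bytes can be read */

    while (ip < ilimit) {
        const uint8_t *candidate = istart;       /* placeholder for a hash-table lookup hit */
        size_t const mLength = count_match(ip, candidate, iend);
        (void)mLength;                           /* a real loop would emit a sequence here */
        ip++;
    }
}
```
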
zstd_ldm.c
    240  const BYTE* const iend = (const BYTE*)end;   in ZSTD_ldm_fillFastTables()
    245  ZSTD_fillHashTable(ms, iend, ZSTD_dtlm_fast);   in ZSTD_ldm_fillFastTables()
    249  ZSTD_fillDoubleHashTable(ms, iend, ZSTD_dtlm_fast);   in ZSTD_ldm_fillFastTables()
    267  ZSTD_ldm_fillHashTable(ldmState_t* ldmState, const BYTE* ip, const BYTE* iend, ldmParams_t const* params)   in ZSTD_ldm_fillHashTable()
    269  const BYTE* iend, ldmParams_t const* params)   in ZSTD_ldm_fillHashTable()
    282  while (ip < iend) {   in ZSTD_ldm_fillHashTable()
    287  hashed = ZSTD_ldm_gear_feed(&hashState, ip, iend - ip, splits, &numSplits);   in ZSTD_ldm_fillHashTable()
    340  BYTE const* const iend = istart + srcSize;   in ZSTD_ldm_generateSequences_internal()
    341  BYTE const* const ilimit = iend - HASH_READ_SIZE;   in ZSTD_ldm_generateSequences_internal()
    353  return iend - anchor;   in ZSTD_ldm_generateSequences_internal()
    414  cur->offset < dictLimit ? dictEnd : iend;   in ZSTD_ldm_generateSequences_internal()
    511  BYTE const* const iend = istart + srcSize;   in ZSTD_ldm_generateSequences()
    671  BYTE const* const iend = istart + srcSize;   in ZSTD_ldm_blockCompress()
    ... (more matches not shown)

zstd_lazy.c
     20  ZSTD_updateDUBT(ZSTD_matchState_t* ms, const BYTE* ip, const BYTE* iend, U32 mls)   in ZSTD_updateDUBT()
     21  const BYTE* ip, const BYTE* iend,   in ZSTD_updateDUBT()
     39  assert(ip + 8 <= iend); /* condition for ZSTD_hashPtr */   in ZSTD_updateDUBT()
     40  (void)iend;   in ZSTD_updateDUBT()
     44  size_t const h = ZSTD_hashPtr(base + idx, hashLog, mls); /* assumption : ip + 8 <= iend */   in ZSTD_updateDUBT()
     78  const BYTE* const iend = (curr>=dictLimit) ? inputEnd : dictBase + dictLimit;   in ZSTD_insertDUBT1()
     94  assert(ip < iend); /* condition for ZSTD_count */   in ZSTD_insertDUBT1()
    113  matchLength += ZSTD_count(ip+matchLength, match+matchLength, iend);   in ZSTD_insertDUBT1()
    116  matchLength += ZSTD_count_2segments(ip+matchLength, match+matchLength, iend, dictEnd, prefixStart);   in ZSTD_insertDUBT1()
    124  if (ip+matchLength == iend) { /* equal : no way to know if inf or sup */   in ZSTD_insertDUBT1()
    153  ZSTD_DUBT_findBetterDictMatch(const ZSTD_matchState_t* ms, const BYTE* const ip, const BYTE* const iend, size_t* offsetPtr, size_t bestLength, U32 nbCompares, U32 const mls, const ZSTD_dictMode_e dictMode)   in ZSTD_DUBT_findBetterDictMatch()
    155  const BYTE* const ip, const BYTE* const iend,   in ZSTD_DUBT_findBetterDictMatch()
    231  ZSTD_DUBT_findBestMatch(ZSTD_matchState_t* ms, const BYTE* const ip, const BYTE* const iend, size_t* offsetPtr, U32 const mls, const ZSTD_dictMode_e dictMode)   in ZSTD_DUBT_findBestMatch()
   1448  ZSTD_searchMax(ZSTD_matchState_t* ms, const BYTE* ip, const BYTE* iend, size_t* offsetPtr, U32 const mls, U32 const rowLog, searchMethod_e const searchMethod, ZSTD_dictMode_e const dictMode)   in ZSTD_searchMax()
   1486  const BYTE* const iend = istart + srcSize;   in ZSTD_compressBlock_lazy_generic()
   1873  const BYTE* const iend = istart + srcSize;   in ZSTD_compressBlock_lazy_extDict_generic()
    ... (more matches not shown)

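The split between `ZSTD_count` and `ZSTD_count_2segments` in the zstd_lazy matches is the interesting part: when the candidate lies in the external-dictionary segment, the comparison first runs against that segment and, only if it reaches dictEnd, continues against the current prefix. A hedged sketch of that two-phase count under illustrative names:

```c
#include <stddef.h>
#include <stdint.h>

/* Bytewise match count bounded by end (stand-in for ZSTD_count). */
static size_t count_fwd(const uint8_t *a, const uint8_t *b, const uint8_t *end)
{
    size_t n = 0;
    while (a + n < end && a[n] == b[n])
        n++;
    return n;
}

/* Two-segment count: match starts in the old (dictionary) segment and may
 * continue into the current prefix.  Mirrors the role ZSTD_count_2segments
 * plays in the snippets above; all names here are illustrative. */
static size_t count_2segments(const uint8_t *ip, const uint8_t *match,
                              const uint8_t *iend,        /* end of current input    */
                              const uint8_t *dictEnd,     /* end of the old segment  */
                              const uint8_t *prefixStart) /* start of current prefix */
{
    /* Phase 1: compare against the old segment, limited by whichever ends first. */
    const uint8_t *const limit = (iend - ip) < (dictEnd - match)
                               ? iend
                               : ip + (dictEnd - match);
    size_t len = count_fwd(ip, match, limit);

    if (match + len != dictEnd)
        return len;                      /* mismatch before the segment boundary */

    /* Phase 2: the match ran off the old segment; continue against the prefix. */
    return len + count_fwd(ip + len, prefixStart, iend);
}
```
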
hist.c
     73  const BYTE* const iend = ip+sourceSize;   in HIST_count_parallel_wksp()
     92  while (ip < iend-15) {   in HIST_count_parallel_wksp()
    118  while (ip<iend) Counting1[*ip++]++;   in HIST_count_parallel_wksp()

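The hist.c hits show the standard two-phase histogram: a wide main loop that stays safely below iend, then a scalar tail that finishes byte by byte. A self-contained sketch of that shape; the four tables and the 4-byte stride are illustrative, the kernel code unrolls more aggressively:

```c
#include <stddef.h>
#include <stdint.h>

/* Byte histogram in the style of HIST_count_parallel_wksp: several counting
 * tables break the dependency on a single counter array, then a scalar tail
 * mirrors `while (ip<iend) Counting1[*ip++]++;`. */
static void byte_histogram(uint32_t count[256], const uint8_t *src, size_t srcSize)
{
    uint32_t c1[256] = {0}, c2[256] = {0}, c3[256] = {0}, c4[256] = {0};
    const uint8_t *ip = src;
    const uint8_t *const iend = src + srcSize;

    while (iend - ip >= 4) {          /* main loop: 4 independent updates per iteration */
        c1[ip[0]]++;
        c2[ip[1]]++;
        c3[ip[2]]++;
        c4[ip[3]]++;
        ip += 4;
    }
    while (ip < iend)                 /* scalar tail for the last few bytes */
        c1[*ip++]++;

    for (int s = 0; s < 256; s++)     /* merge the partial tables */
        count[s] = c1[s] + c2[s] + c3[s] + c4[s];
}
```
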
zstd_opt.c
    328  * assumption : literals + litLengtn <= iend */
    408  * @param ip assumed <= iend-8 .
    411  ZSTD_insertBt1(const ZSTD_matchState_t* ms, const BYTE* const ip, const BYTE* const iend, U32 const target, U32 const mls, const int extDict)   in ZSTD_insertBt1()
    413  const BYTE* const ip, const BYTE* const iend,   in ZSTD_insertBt1()
    454  assert(ip <= iend-8); /* required for h calculation */   in ZSTD_insertBt1()
    487  matchLength += ZSTD_count(ip+matchLength, match+matchLength, iend);   in ZSTD_insertBt1()
    490  matchLength += ZSTD_count_2segments(ip+matchLength, match+matchLength, iend, dictEnd, prefixStart);   in ZSTD_insertBt1()
    501  if (ip+matchLength == iend) { /* equal : no way to know if inf or sup */   in ZSTD_insertBt1()
    530  ZSTD_updateTree_internal(ZSTD_matchState_t* ms, const BYTE* const ip, const BYTE* const iend, const U32 mls, const ZSTD_dictMode_e dictMode)   in ZSTD_updateTree_internal()
    532  const BYTE* const ip, const BYTE* const iend,   in ZSTD_updateTree_internal()
    542  U32 const forward = ZSTD_insertBt1(ms, base+idx, iend, target, mls, dictMode == ZSTD_extDict);   in ZSTD_updateTree_internal()
    547  assert((size_t)(iend ...   in ZSTD_updateTree_internal()
    551  ZSTD_updateTree(ZSTD_matchState_t* ms, const BYTE* ip, const BYTE* iend)   in ZSTD_updateTree()
   1048  const BYTE* const iend = istart + srcSize;   in ZSTD_compressBlock_opt_generic()
    ... (more matches not shown)

zstd_compress_superblock.c
    442  BYTE const* const iend = ip + srcSize;   in ZSTD_compressSubBlock_multi()
    499  assert(ip + decompressedSize <= iend);   in ZSTD_compressSubBlock_multi()
    530  if (ip < iend) {   in ZSTD_compressSubBlock_multi()
    531  size_t const cSize = ZSTD_noCompressBlock(op, oend - op, ip, iend - ip, lastBlock);   in ZSTD_compressSubBlock_multi()
    532  DEBUGLOG(5, "ZSTD_compressSubBlock_multi last sub-block uncompressed, %zu bytes", (size_t)(iend - ip));   in ZSTD_compressSubBlock_multi()

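The superblock hits show the tail fallback: whatever remains between ip and iend after the last compressible sub-block is emitted as a raw block via ZSTD_noCompressBlock. A sketch of such an emitter, assuming the standard 3-byte zstd block header (last-block bit, 2-bit block type, size in the upper bits); names and error handling are simplified:

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Emit the bytes [ip, iend) as one raw block: 3-byte header, then a copy.
 * Returns the number of bytes written, or 0 if it does not fit. */
static size_t emit_raw_block(uint8_t *op, size_t dstCapacity,
                             const uint8_t *ip, const uint8_t *iend, int lastBlock)
{
    size_t const blockSize = (size_t)(iend - ip);
    uint32_t header;

    if (blockSize > 0x1FFFFF)             /* must fit the 21-bit size field   */
        return 0;
    if (dstCapacity < blockSize + 3)
        return 0;                         /* caller treats 0 as "does not fit" */

    header = (uint32_t)lastBlock          /* bit 0: last-block flag            */
           | (0u << 1)                    /* bits 1-2: block type 0 = raw      */
           | ((uint32_t)blockSize << 3);  /* bits 3..23: block size            */
    op[0] = (uint8_t)(header >> 0);
    op[1] = (uint8_t)(header >> 8);
    op[2] = (uint8_t)(header >> 16);
    memcpy(op + 3, ip, blockSize);
    return blockSize + 3;
}
```
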
zstd_ldm.h
     26  const BYTE* iend, ldmParams_t const* params);

zstd_opt.h
     18  void ZSTD_updateTree(ZSTD_matchState_t* ms, const BYTE* ip, const BYTE* iend);

/kernel/linux/linux-5.10/lib/zstd/

entropy_common.c
     62  const BYTE *const iend = istart + hbSize;   in FSE_readNCount()
     90  if (ip < iend - 5) {   in FSE_readNCount()
    109  if ((ip <= iend - 7) || (ip + (bitCount >> 3) <= iend - 4)) {   in FSE_readNCount()
    140  if ((ip <= iend - 7) || (ip + (bitCount >> 3) <= iend - 4)) {   in FSE_readNCount()
    144  bitCount -= (int)(8 * (iend - 4 - ip));   in FSE_readNCount()
    145  ip = iend - 4;   in FSE_readNCount()

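The FSE_readNCount hits show how the normalized-count reader refills its 32-bit window without reading past iend: advance by whole consumed bytes while at least 7 bytes remain, otherwise pin ip to iend - 4 and fold the missing distance into bitCount. A sketch of that refill; the guards for very small inputs present in the real code are omitted, and helper names are illustrative:

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Little-endian 32-bit read (stand-in for MEM_readLE32). */
static uint32_t read_le32(const uint8_t *p)
{
    uint32_t v;
    memcpy(&v, p, sizeof v);              /* sketch assumes a little-endian host */
    return v;
}

/* Refill pattern from the snippets above: either consume whole bytes, or,
 * near the end, clamp ip to the last safe 4-byte read position and account
 * for the shortfall in bitCount so no read ever crosses iend. */
static void refill(const uint8_t **ipPtr, const uint8_t *iend,
                   int *bitCount, uint32_t *bitStream)
{
    const uint8_t *ip = *ipPtr;

    if (ip <= iend - 7 || ip + (*bitCount >> 3) <= iend - 4) {
        ip += *bitCount >> 3;             /* consume whole bytes              */
        *bitCount &= 7;
    } else {
        *bitCount -= (int)(8 * (iend - 4 - ip));
        ip = iend - 4;                    /* last position safe for 4 bytes   */
    }
    *bitStream = read_le32(ip) >> *bitCount;
    *ipPtr = ip;
}
```
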
compress.c
    998  const BYTE *const iend = ((const BYTE *)end) - HASH_READ_SIZE;   in ZSTD_fillHashTable()
   1001  while (ip <= iend) {   in ZSTD_fillHashTable()
   1019  const BYTE *const iend = istart + srcSize;   in ZSTD_compressBlock_fast_generic()
   1020  const BYTE *const ilimit = iend - HASH_READ_SIZE;   in ZSTD_compressBlock_fast_generic()
   1044  mLength = ZSTD_count(ip + 1 + 4, ip + 1 + 4 - offset_1, iend) + 4;   in ZSTD_compressBlock_fast_generic()
   1053  mLength = ZSTD_count(ip + 4, match + 4, iend) + 4;   in ZSTD_compressBlock_fast_generic()
   1072  hashTable[ZSTD_hashPtr(base + curr + 2, hBits, mls)] = curr + 2; /* here because curr+2 could be > iend-8 */   in ZSTD_compressBlock_fast_generic()
   1077  size_t const rLength = ZSTD_count(ip + 4, ip + 4 - offset_2, iend) + 4;   in ZSTD_compressBlock_fast_generic()
   1098  size_t const lastLLSize = iend - anchor;   in ZSTD_compressBlock_fast_generic()
   1131  const BYTE *const iend ...   in ZSTD_compressBlock_fast_extDict_generic()
   1243  const BYTE *const iend = ((const BYTE *)end) - HASH_READ_SIZE;   in ZSTD_fillDoubleHashTable()
   1267  const BYTE *const iend = istart + srcSize;   in ZSTD_compressBlock_doubleFast_generic()
   1412  const BYTE *const iend = istart + srcSize;   in ZSTD_compressBlock_doubleFast_extDict_generic()
   1561  ZSTD_insertBt1(ZSTD_CCtx *zc, const BYTE *const ip, const U32 mls, const BYTE *const iend, U32 nbCompares, U32 extDict)   in ZSTD_insertBt1()
   1643  ZSTD_insertBtAndFindBestMatch(ZSTD_CCtx *zc, const BYTE *const ip, const BYTE *const iend, size_t *offsetPtr, U32 nbCompares, const U32 mls, U32 extDict)   in ZSTD_insertBtAndFindBestMatch()
   1724  ZSTD_updateTree(ZSTD_CCtx *zc, const BYTE *const ip, const BYTE *const iend, const U32 nbCompares, const U32 mls)   in ZSTD_updateTree()
   1755  ZSTD_updateTree_extDict(ZSTD_CCtx *zc, const BYTE *const ip, const BYTE *const iend, const U32 nbCompares, const U32 mls)   in ZSTD_updateTree_extDict()
   1903  const BYTE *const iend = istart + srcSize;   in ZSTD_compressBlock_lazy_generic()
   2064  const BYTE *const iend = istart + srcSize;   in ZSTD_compressBlock_lazy_extDict_generic()
   2560  const BYTE *const iend = ip + srcSize;   in ZSTD_loadDictionaryContent()
   3133  const char *const iend = istart + *srcSizePtr;   in ZSTD_compressStream_generic()
    ... (more matches not shown)

zstd_opt.h
    413  const BYTE *const iend = istart + srcSize;   in ZSTD_compressBlock_opt_generic()
    414  const BYTE *const ilimit = iend - 8;   in ZSTD_compressBlock_opt_generic()
    453  mlen = (U32)ZSTD_count(ip + minMatch, ip + minMatch - repCur, iend) + minMatch;   in ZSTD_compressBlock_opt_generic()
    472  match_num = ZSTD_BtGetAllMatches_selectMLS(ctx, ip, iend, maxSearches, mls, matches, minMatch);   in ZSTD_compressBlock_opt_generic()
    557  mlen = (U32)ZSTD_count(inr + minMatch, inr + minMatch - repCur, iend) + minMatch;   in ZSTD_compressBlock_opt_generic()
    591  match_num = ZSTD_BtGetAllMatches_selectMLS(ctx, inr, iend, maxSearches, mls, matches, best_mlen);   in ZSTD_compressBlock_opt_generic()
    693  size_t const lastLLSize = iend - anchor;   in ZSTD_compressBlock_opt_generic()
    706  const BYTE *const iend = istart + srcSize;   in ZSTD_compressBlock_opt_extDict_generic()
    707  const BYTE *const ilimit = iend - 8;   in ZSTD_compressBlock_opt_extDict_generic()
    757  const BYTE *const repEnd = repIndex < dictLimit ? dictEnd : iend;   in ZSTD_compressBlock_opt_extDict_generic()
    ... (more matches not shown)

decompress.c
    797  const BYTE *const iend = istart + srcSize;   in ZSTD_decodeSeqHeaders()
    813  if (ip + 2 > iend)   in ZSTD_decodeSeqHeaders()
    817  if (ip >= iend)   in ZSTD_decodeSeqHeaders()
    826  if (ip + 4 > iend)   in ZSTD_decodeSeqHeaders()
    836  size_t const llhSize = ZSTD_buildSeqTable(dctx->entropy.LLTable, &dctx->LLTptr, LLtype, MaxLL, LLFSELog, ip, iend - ip,   in ZSTD_decodeSeqHeaders()
    843  size_t const ofhSize = ZSTD_buildSeqTable(dctx->entropy.OFTable, &dctx->OFTptr, OFtype, MaxOff, OffFSELog, ip, iend - ip,   in ZSTD_decodeSeqHeaders()
    850  size_t const mlhSize = ZSTD_buildSeqTable(dctx->entropy.MLTable, &dctx->MLTptr, MLtype, MaxML, MLFSELog, ip, iend - ip,   in ZSTD_decodeSeqHeaders()
   1096  const BYTE *const iend = ip + seqSize;   in ZSTD_decompressSequences()
   1124  CHECK_E(BIT_initDStream(&seqState.DStream, ip, iend - ip), corruption_detected);   in ZSTD_decompressSequences()
   1351  const BYTE *const iend ...   in ZSTD_decompressSequencesLong()
   2301  const char *const iend = (const char *)(input->src) + input->size;   in ZSTD_decompressStream()
    ... (more matches not shown)

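The ZSTD_decodeSeqHeaders hits validate every read against iend before touching the bytes. A bounds-checked sketch in that spirit, loosely following the variable-length sequence-count encoding of the zstd format (treat the exact thresholds here as illustrative):

```c
#include <stddef.h>
#include <stdint.h>

/* Parse a 1-3 byte sequence count, refusing to read past iend.
 * Returns 0 on success, -1 on truncated input. */
static int parse_seq_count(const uint8_t **ipPtr, const uint8_t *iend, int *nbSeq)
{
    const uint8_t *ip = *ipPtr;

    if (ip >= iend) return -1;            /* need at least the first byte        */
    *nbSeq = *ip++;
    if (*nbSeq > 0x7F) {                  /* long forms need one or two more bytes */
        if (*nbSeq == 0xFF) {
            if (ip + 2 > iend) return -1;
            *nbSeq = (int)(ip[0] | ((int)ip[1] << 8)) + 0x7F00;
            ip += 2;
        } else {
            if (ip >= iend) return -1;
            *nbSeq = ((*nbSeq - 0x80) << 8) + *ip++;
        }
    }
    *ipPtr = ip;
    return 0;
}
```
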
fse_compress.c
    361  const BYTE *const iend = ip + sourceSize;   in FSE_count_parallel_wksp()
    384  while (ip < iend - 15) {   in FSE_count_parallel_wksp()
    418  while (ip < iend)   in FSE_count_parallel_wksp()
    727  const BYTE *const iend = istart + srcSize;   in FSE_compress_usingCTable_generic()
    728  const BYTE *ip = iend;   in FSE_compress_usingCTable_generic()

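Note the initialization at line 728: FSE_compress_usingCTable_generic starts with ip = iend and walks backwards, because tANS/FSE must encode symbols in reverse so the decoder can consume them forward. A schematic of that traversal only; the state update and bit output are deliberately elided and encode_symbol is a placeholder:

```c
#include <stddef.h>
#include <stdint.h>

/* Backward traversal skeleton: consume the input from iend down to istart. */
static void encode_backward(const uint8_t *istart, size_t srcSize)
{
    const uint8_t *const iend = istart + srcSize;
    const uint8_t *ip = iend;             /* note: starts at the end */

    while (ip > istart) {
        uint8_t const symbol = *--ip;     /* last unprocessed byte first */
        /* encode_symbol(&state, &bitstream, symbol);  -- elided */
        (void)symbol;
    }
}
```
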
/kernel/linux/linux-6.6/lib/zstd/common/

entropy_common.c
     63  const BYTE* const iend = istart + hbSize;   in FSE_readNCount_body()
    108  if (LIKELY(ip <= iend-7)) {   in FSE_readNCount_body()
    111  bitCount -= (int)(8 * (iend - 7 - ip));   in FSE_readNCount_body()
    113  ip = iend - 4;   in FSE_readNCount_body()
    137  if (LIKELY(ip <= iend-7) || (ip + (bitCount>>3) <= iend-4)) {   in FSE_readNCount_body()
    142  bitCount -= (int)(8 * (iend - 4 - ip));   in FSE_readNCount_body()
    144  ip = iend - 4;   in FSE_readNCount_body()
    186  if (LIKELY(ip <= iend-7) || (ip + (bitCount>>3) <= iend ...   in FSE_readNCount_body()
    ... (more matches not shown)

/kernel/linux/linux-5.10/lib/lz4/

lz4_decompress.c
     84  const BYTE * const iend = ip + srcSize;   in LZ4_decompress_generic()
     98  const BYTE *const shortiend = iend - ...   in LZ4_decompress_generic()
    130  /* ip < iend before the increment */   in LZ4_decompress_generic()
    131  assert(!endOnInput || ip <= iend);   in LZ4_decompress_generic()
    195  if (unlikely(endOnInput ? ip >= iend - RUN_MASK : 0)) {   in LZ4_decompress_generic()
    203  ? ip < iend - RUN_MASK   in LZ4_decompress_generic()
    225  || (ip + length > iend - (2 + 1 + LASTLITERALS))))   in LZ4_decompress_generic()
    237  && (ip + length > iend)) {   in LZ4_decompress_generic()
    256  && ((ip + length != iend)   in LZ4_decompress_generic()
    279  if (!partialDecoding || (cpy == oend) || (ip >= (iend ...   in LZ4_decompress_generic()
    ... (more matches not shown)

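Every length read in the LZ4 decoder hits is guarded against iend. A sketch of the extended-length read those guards protect: a 4-bit token field of 15 means more length bytes follow, each 255 adding another 255, and the loop must stop if it would run past the input block. Overflow handling is simplified and the names are illustrative:

```c
#include <stddef.h>
#include <stdint.h>

/* Read an LZ4-style extended length: `initial` is the 4-bit token field,
 * 15 (RUN_MASK / ML_MASK) means continuation bytes follow.  Returns 0 on
 * success, -1 if the read would cross iend. */
static int read_extended_length(const uint8_t **ipPtr, const uint8_t *iend,
                                size_t initial, size_t *lengthOut)
{
    const uint8_t *ip = *ipPtr;
    size_t length = initial;
    uint8_t s;

    if (initial == 15) {
        do {
            if (ip >= iend) return -1;    /* would read past the input block */
            s = *ip++;
            length += s;
        } while (s == 255);
    }
    *ipPtr = ip;
    *lengthOut = length;
    return 0;
}
```
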
lz4_compress.c
    196  const BYTE * const iend = ip + inputSize;   in LZ4_compress_generic()
    197  const BYTE * const mflimit = iend - MFLIMIT;   in LZ4_compress_generic()
    198  const BYTE * const matchlimit = iend - LASTLITERALS;   in LZ4_compress_generic()
    431  size_t const lastRun = (size_t)(iend - anchor);   in LZ4_compress_generic()
    534  const BYTE * const iend = ip + *srcSizePtr;   in LZ4_compress_destSize_generic()
    535  const BYTE * const mflimit = iend - MFLIMIT;   in LZ4_compress_destSize_generic()
    536  const BYTE * const matchlimit = iend - LASTLITERALS;   in LZ4_compress_destSize_generic()
    690  size_t lastRunSize = (size_t)(iend - anchor);   in LZ4_compress_destSize_generic()

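The compressor hits derive mflimit and matchlimit from iend and, once the match search stops, flush everything from anchor to iend as one final literal run. A sketch of that flush (token nibble, optional extra length bytes, then the literals); output-capacity checks are omitted and the constants are the usual LZ4 token layout:

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Emit the last literal run [anchor, iend) and return the advanced output pointer. */
static uint8_t *flush_last_literals(uint8_t *op, const uint8_t *anchor, const uint8_t *iend)
{
    size_t const lastRun = (size_t)(iend - anchor);

    if (lastRun >= 15) {                  /* length does not fit the token nibble */
        size_t accumulator = lastRun - 15;
        *op++ = 15 << 4;                  /* literal-length nibble saturated      */
        for (; accumulator >= 255; accumulator -= 255)
            *op++ = 255;
        *op++ = (uint8_t)accumulator;
    } else {
        *op++ = (uint8_t)(lastRun << 4);  /* length fits in the token nibble      */
    }
    memcpy(op, anchor, lastRun);          /* mirrors LZ4_memcpy(op, anchor, iend - anchor) */
    return op + lastRun;
}
```
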
lz4hc_compress.c
    350  const BYTE * const iend = ip + inputSize;   in LZ4HC_compress_generic()
    351  const BYTE * const mflimit = iend - MFLIMIT;   in LZ4HC_compress_generic()
    352  const BYTE * const matchlimit = (iend - LASTLITERALS);   in LZ4HC_compress_generic()
    556  int lastRun = (int)(iend - anchor);   in LZ4HC_compress_generic()
    573  LZ4_memcpy(op, anchor, iend - anchor);   in LZ4HC_compress_generic()
    574  op += iend - anchor;   in LZ4HC_compress_generic()

/kernel/linux/linux-6.6/lib/lz4/

lz4_decompress.c
     84  const BYTE * const iend = ip + srcSize;   in LZ4_decompress_generic()
     98  const BYTE *const shortiend = iend - ...   in LZ4_decompress_generic()
    130  /* ip < iend before the increment */   in LZ4_decompress_generic()
    131  assert(!endOnInput || ip <= iend);   in LZ4_decompress_generic()
    195  if (unlikely(endOnInput ? ip >= iend - RUN_MASK : 0)) {   in LZ4_decompress_generic()
    203  ? ip < iend - RUN_MASK   in LZ4_decompress_generic()
    225  || (ip + length > iend - (2 + 1 + LASTLITERALS))))   in LZ4_decompress_generic()
    237  && (ip + length > iend)) {   in LZ4_decompress_generic()
    256  && ((ip + length != iend)   in LZ4_decompress_generic()
    279  if (!partialDecoding || (cpy == oend) || (ip >= (iend ...   in LZ4_decompress_generic()
    ... (more matches not shown)

lz4_compress.c
    196  const BYTE * const iend = ip + inputSize;   in LZ4_compress_generic()
    197  const BYTE * const mflimit = iend - MFLIMIT;   in LZ4_compress_generic()
    198  const BYTE * const matchlimit = iend - LASTLITERALS;   in LZ4_compress_generic()
    431  size_t const lastRun = (size_t)(iend - anchor);   in LZ4_compress_generic()
    534  const BYTE * const iend = ip + *srcSizePtr;   in LZ4_compress_destSize_generic()
    535  const BYTE * const mflimit = iend - MFLIMIT;   in LZ4_compress_destSize_generic()
    536  const BYTE * const matchlimit = iend - LASTLITERALS;   in LZ4_compress_destSize_generic()
    690  size_t lastRunSize = (size_t)(iend - anchor);   in LZ4_compress_destSize_generic()

lz4hc_compress.c
    350  const BYTE * const iend = ip + inputSize;   in LZ4HC_compress_generic()
    351  const BYTE * const mflimit = iend - MFLIMIT;   in LZ4HC_compress_generic()
    352  const BYTE * const matchlimit = (iend - LASTLITERALS);   in LZ4HC_compress_generic()
    556  int lastRun = (int)(iend - anchor);   in LZ4HC_compress_generic()
    573  LZ4_memcpy(op, anchor, iend - anchor);   in LZ4HC_compress_generic()
    574  op += iend - anchor;   in LZ4HC_compress_generic()

/kernel/linux/linux-5.10/arch/arm64/kernel/

head.S
    198  * iend: index in table corresponding to vend
    204  * Returns: istart, iend, count
    206  .macro compute_indices, vstart, vend, shift, ptrs, istart, iend, count
    207  lsr \iend, \vend, \shift
    210  and \iend, \iend, \istart // iend = (vend >> shift) & (ptrs - 1)
    213  add \iend, \iend, \istart // iend ...
    ... (more matches not shown)

/kernel/linux/linux-6.6/arch/arm64/kernel/

head.S
    227  * iend: index in table corresponding to vend
    233  * Returns: istart, iend, count
    235  .macro compute_indices, vstart, vend, shift, order, istart, iend, count
    237  ubfx \iend, \vend, \shift, \order
    238  add \iend, \iend, \count, lsl \order
    239  sub \count, \iend, \istart
    258  * Temporaries: istart, iend, tmp, count, sv - these need to be different registers
    260  * Corrupts: tbl, rtbl, vend, istart, iend, tmp, count, sv
    262  .macro map_memory, tbl, rtbl, vstart, vend, flags, phys, order, istart, iend, tm...
    ... (more matches not shown)

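The 6.6 compute_indices macro extracts the page-table index of vstart and vend at a given shift/order and carries the entry count from the previous level into iend. A C model of that arithmetic, written as a reading of the visible assembly rather than a drop-in replacement:

```c
#include <stdint.h>

/* One page-table level: istart/iend are the first and (carried) last index,
 * the return value is the number of table entries this level needs. */
static uint64_t compute_indices(uint64_t vstart, uint64_t vend,
                                unsigned shift, unsigned order,
                                uint64_t prev_count,
                                uint64_t *istart, uint64_t *iend)
{
    uint64_t const mask = (UINT64_C(1) << order) - 1;

    *istart = (vstart >> shift) & mask;               /* ubfx istart, vstart, shift, order */
    *iend   = ((vend >> shift) & mask)                /* ubfx iend, vend, shift, order     */
            + (prev_count << order);                  /* add  iend, iend, count, lsl order */
    return *iend - *istart;                           /* sub  count, iend, istart          */
}
```
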
/kernel/linux/linux-6.6/lib/zstd/decompress/

huf_decompress.c
    153  BYTE const* iend[4];   (struct member)
    193  args->iend[0] = istart + 6;  /* jumpTable */   in HUF_DecompressAsmArgs_init()
    194  args->iend[1] = args->iend[0] + length1;   in HUF_DecompressAsmArgs_init()
    195  args->iend[2] = args->iend[1] + length2;   in HUF_DecompressAsmArgs_init()
    196  args->iend[3] = args->iend[2] + length3;   in HUF_DecompressAsmArgs_init()
    208  args->ip[0] = args->iend[1] - sizeof(U64);   in HUF_DecompressAsmArgs_init()
    209  args->ip[1] = args->iend[...   in HUF_DecompressAsmArgs_init()
    673  const BYTE* const iend = (const BYTE*)cSrc + 6;   in HUF_decompress4X1_usingDTable_internal_bmi2_asm()
   1387  const BYTE* const iend = (const BYTE*)cSrc + 6;   in HUF_decompress4X2_usingDTable_internal_bmi2_asm()
    ... (more matches not shown)

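The huf_decompress hits split a 4-stream Huffman block: the first 6 bytes form a jump table of three little-endian 16-bit lengths for streams 1-3, stream 4 takes the rest, and iend[i] marks one past the end of stream i. A sketch of that boundary computation with minimal validation:

```c
#include <stddef.h>
#include <stdint.h>

/* Fill iend[0..3] with the end pointers of the jump table and streams 1-3;
 * stream 4 then runs from iend[3] to cSrc + cSrcSize.  Returns -1 if the
 * jump table points past the block. */
static int split_4streams(const uint8_t *cSrc, size_t cSrcSize,
                          const uint8_t *iend[4])
{
    size_t length1, length2, length3;

    if (cSrcSize < 10)                    /* 6-byte jump table plus some payload */
        return -1;

    length1 = (size_t)cSrc[0] | ((size_t)cSrc[1] << 8);
    length2 = (size_t)cSrc[2] | ((size_t)cSrc[3] << 8);
    length3 = (size_t)cSrc[4] | ((size_t)cSrc[5] << 8);
    if (6 + length1 + length2 + length3 > cSrcSize)
        return -1;

    iend[0] = cSrc + 6;                   /* end of the jump table               */
    iend[1] = iend[0] + length1;          /* end of stream 1                     */
    iend[2] = iend[1] + length2;          /* end of stream 2                     */
    iend[3] = iend[2] + length3;          /* end of stream 3; stream 4 follows   */
    return 0;
}
```
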
zstd_decompress.c
   1882  const char* const iend = input->size != 0 ? src + input->size : src;   in ZSTD_decompressStream()
   1916  DEBUGLOG(5, "stage zdss_loadHeader (srcSize : %u)", (U32)(iend - ip));   in ZSTD_decompressStream()
   1927  size_t const remainingInput = (size_t)(iend-ip);   in ZSTD_decompressStream()
   1928  assert(iend >= ip);   in ZSTD_decompressStream()
   1946  size_t const cSize = ZSTD_findFrameCompressedSize(istart, (size_t)(iend-istart));   in ZSTD_decompressStream()
   1947  if (cSize <= (size_t)(iend-istart)) {   in ZSTD_decompressStream()
   2028  { size_t const neededInSize = ZSTD_nextSrcSizeToDecompressWithInputSize(zds, (size_t)(iend - ip));   in ZSTD_decompressStream()
   2035  if ((size_t)(iend-ip) >= neededInSize) { /* decode directly from src */   in ZSTD_decompressStream()
   2041  if (ip==iend) { someMoreWork = 0; break; } /* no more input */   in ZSTD_decompressStream()
   2051  assert(neededInSize == ZSTD_nextSrcSizeToDecompressWithInputSize(zds, iend ...   in ZSTD_decompressStream()
    ... (more matches not shown)

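The streaming hits bound the caller's buffer with ip/iend, ask how much the next decode step needs, and park rather than error when the buffer runs dry. A skeleton of that loop; next_need and consume are placeholders for the real stage machinery:

```c
#include <stddef.h>
#include <stdint.h>

typedef struct { const uint8_t *src; size_t size; size_t pos; } in_buffer;

static size_t next_need(void)                 { return 4; }            /* placeholder */
static void   consume(const uint8_t *p, size_t n) { (void)p; (void)n; } /* placeholder */

/* Consume as much of the caller's buffer as the decoder can use, then stop
 * and record progress so the caller can top the buffer up and call again. */
static void stream_decode(in_buffer *input)
{
    const uint8_t *ip = input->src + input->pos;
    const uint8_t *const iend = input->src + input->size;

    while (ip < iend) {
        size_t const needed = next_need();
        if ((size_t)(iend - ip) < needed)
            break;                        /* not enough input yet: wait for more */
        consume(ip, needed);
        ip += needed;
    }
    input->pos = (size_t)(ip - input->src);
}
```
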