/kernel/linux/linux-6.6/lib/zstd/common/
entropy_common.c
   225  const void* headerBuffer, size_t hbSize, int bmi2)  in FSE_readNCount_bmi2()
   228  if (bmi2) {  in FSE_readNCount_bmi2()
   232  (void)bmi2;  in FSE_readNCount_bmi2()
   240  return FSE_readNCount_bmi2(normalizedCounter, maxSVPtr, tableLogPtr, headerBuffer, hbSize, /* bmi2 */ 0);  in FSE_readNCount()
   256  return HUF_readStats_wksp(huffWeight, hwSize, rankStats, nbSymbolsPtr, tableLogPtr, src, srcSize, wksp, sizeof(wksp), /* bmi2 */ 0);  in HUF_readStats()
   264  int bmi2)  in HUF_readStats_body()
   289  oSize = FSE_decompress_wksp_bmi2(huffWeight, hwSize-1, ip+1, iSize, 6, workSpace, wkspSize, bmi2);  in HUF_readStats_body()
   348  int bmi2)  in HUF_readStats_wksp()
   351  if (bmi2) {  in HUF_readStats_wksp()
   355  (void)bmi2;  in HUF_readStats_wksp()
   223  FSE_readNCount_bmi2(short* normalizedCounter, unsigned* maxSVPtr, unsigned* tableLogPtr, const void* headerBuffer, size_t hbSize, int bmi2)  FSE_readNCount_bmi2() argument
   260  HUF_readStats_body(BYTE* huffWeight, size_t hwSize, U32* rankStats, U32* nbSymbolsPtr, U32* tableLogPtr, const void* src, size_t srcSize, void* workSpace, size_t wkspSize, int bmi2)  HUF_readStats_body() argument
   344  HUF_readStats_wksp(BYTE* huffWeight, size_t hwSize, U32* rankStats, U32* nbSymbolsPtr, U32* tableLogPtr, const void* src, size_t srcSize, void* workSpace, size_t wkspSize, int bmi2)  HUF_readStats_wksp() argument
   [all...]
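The hits above show the dispatch shape used throughout lib/zstd: a worker body takes an int bmi2 flag, and a thin wrapper either calls a BMI2-compiled variant or the portable one, reducing the flag to (void)bmi2; when dynamic dispatch is compiled out. Below is a minimal self-contained sketch of that shape; the widget* names are hypothetical, not zstd APIs, and the real code uses its own attribute/ifdef plumbing.

    #include <stddef.h>

    /* Portable body: always available. */
    static size_t widget_body_default(const void* src, size_t srcSize)
    {
        (void)src;
        return srcSize;  /* placeholder work */
    }

    #if defined(__GNUC__) && defined(__x86_64__)
    /* Same body, compiled so the compiler may use BMI2 instructions. */
    __attribute__((target("bmi2")))
    static size_t widget_body_bmi2(const void* src, size_t srcSize)
    {
        (void)src;
        return srcSize;  /* placeholder work */
    }

    /* Runtime dispatch: caller passes bmi2=1 only when the CPU supports it. */
    size_t widget(const void* src, size_t srcSize, int bmi2)
    {
        if (bmi2) {
            return widget_body_bmi2(src, srcSize);
        }
        return widget_body_default(src, srcSize);
    }
    #else
    /* No dynamic dispatch on this target: the flag is simply ignored. */
    size_t widget(const void* src, size_t srcSize, int bmi2)
    {
        (void)bmi2;
        return widget_body_default(src, srcSize);
    }
    #endif

The public FSE_readNCount()/HUF_readStats() entry points in the hits follow the same convention and simply forward /* bmi2 */ 0 when the caller has not probed the CPU.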
huf.h
   190  size_t HUF_compress4X_usingCTable_bmi2(void* dst, size_t dstSize, const void* src, size_t srcSize, const HUF_CElt* CTable, int bmi2);
   209  HUF_CElt* hufTable, HUF_repeat* repeat, int preferRepeat, int bmi2, unsigned suspectUncompressible);
   233  * If the CPU has BMI2 support, pass bmi2=1, otherwise pass bmi2=0.
   241  int bmi2);
   304  size_t HUF_compress1X_usingCTable_bmi2(void* dst, size_t dstSize, const void* src, size_t srcSize, const HUF_CElt* CTable, int bmi2);
   315  HUF_CElt* hufTable, HUF_repeat* repeat, int preferRepeat, int bmi2, unsigned suspectUncompressible);
   342  * If the CPU has BMI2 support, pass bmi2=1, otherwise pass bmi2=0.
   344  size_t HUF_decompress1X_usingDTable_bmi2(void* dst, size_t maxDstSize, const void* cSrc, size_t cSrcSize, const HUF_DTable* DTable, int bmi2);
   [all...]
fse_decompress.c
   310  return FSE_decompress_wksp_bmi2(dst, dstCapacity, cSrc, cSrcSize, maxLog, workSpace, wkspSize, /* bmi2 */ 0);  in FSE_decompress_wksp()
   323  int bmi2)  in FSE_decompress_wksp_body()
   336  size_t const NCountLength = FSE_readNCount_bmi2(wksp->ncount, &maxSymbolValue, &tableLog, istart, cSrcSize, bmi2);  in FSE_decompress_wksp_body()
   374  size_t FSE_decompress_wksp_bmi2(void* dst, size_t dstCapacity, const void* cSrc, size_t cSrcSize, unsigned maxLog, void* workSpace, size_t wkspSize, int bmi2)  FSE_decompress_wksp_bmi2() argument
   377  if (bmi2) {  in FSE_decompress_wksp_bmi2()
   381  (void)bmi2;  in FSE_decompress_wksp_bmi2()
   319  FSE_decompress_wksp_body(void* dst, size_t dstCapacity, const void* cSrc, size_t cSrcSize, unsigned maxLog, void* workSpace, size_t wkspSize, int bmi2)  FSE_decompress_wksp_body() argument
cpu.h
   166  B(bmi2, 8)
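The single cpu.h hit, B(bmi2, 8), is consistent with the CPUID layout: BMI2 is reported in bit 8 of EBX for CPUID leaf 7, sub-leaf 0. A user-space sketch of the same probe for GCC/Clang on x86-64 follows; has_bmi2 is an illustrative name, and the kernel file builds its feature flags from its own macro table rather than this helper.

    #include <stdio.h>

    #if defined(__GNUC__) && defined(__x86_64__)
    #include <cpuid.h>

    /* Return 1 if CPUID leaf 7, sub-leaf 0 reports BMI2 (EBX bit 8). */
    static int has_bmi2(void)
    {
        unsigned int eax, ebx, ecx, edx;
        if (!__get_cpuid_count(7, 0, &eax, &ebx, &ecx, &edx))
            return 0;
        return (ebx >> 8) & 1;
    }
    #else
    static int has_bmi2(void) { return 0; }
    #endif

    int main(void)
    {
        printf("bmi2: %d\n", has_bmi2());
        return 0;
    }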
fse.h
   235  * Same as FSE_readNCount() but pass bmi2=1 when your CPU supports BMI2 and 0 otherwise.
   239  const void* rBuffer, size_t rBuffSize, int bmi2);
   358  size_t FSE_decompress_wksp_bmi2(void* dst, size_t dstCapacity, const void* cSrc, size_t cSrcSize, unsigned maxLog, void* workSpace, size_t wkspSize, int bmi2);
/kernel/linux/linux-6.6/lib/zstd/compress/
huf_compress.c
   1048  const HUF_CElt* CTable, const int bmi2)  in HUF_compress1X_usingCTable_internal()
   1050  if (bmi2) {  in HUF_compress1X_usingCTable_internal()
   1061  const HUF_CElt* CTable, const int bmi2)  in HUF_compress1X_usingCTable_internal()
   1063  (void)bmi2;  in HUF_compress1X_usingCTable_internal()
   1071  return HUF_compress1X_usingCTable_bmi2(dst, dstSize, src, srcSize, CTable, /* bmi2 */ 0);  in HUF_compress1X_usingCTable()
   1074  size_t HUF_compress1X_usingCTable_bmi2(void* dst, size_t dstSize, const void* src, size_t srcSize, const HUF_CElt* CTable, int bmi2)  HUF_compress1X_usingCTable_bmi2() argument
   1076  return HUF_compress1X_usingCTable_internal(dst, dstSize, src, srcSize, CTable, bmi2);  in HUF_compress1X_usingCTable_bmi2()
   1082  const HUF_CElt* CTable, int bmi2)  in HUF_compress4X_usingCTable_internal()
   1096  { CHECK_V_F(cSize, HUF_compress1X_usingCTable_internal(op, (size_t)(oend-op), ip, segmentSize, CTable, bmi2) );  in HUF_compress4X_usingCTable_internal()
   1104  { CHECK_V_F(cSize, HUF_compress1X_usingCTable_internal(op, (size_t)(oend-op), ip, segmentSize, CTable, bmi2) );  in HUF_compress4X_usingCTable_internal()
   1046  HUF_compress1X_usingCTable_internal(void* dst, size_t dstSize, const void* src, size_t srcSize, const HUF_CElt* CTable, const int bmi2)  HUF_compress1X_usingCTable_internal() argument
   1059  HUF_compress1X_usingCTable_internal(void* dst, size_t dstSize, const void* src, size_t srcSize, const HUF_CElt* CTable, const int bmi2)  HUF_compress1X_usingCTable_internal() argument
   1080  HUF_compress4X_usingCTable_internal(void* dst, size_t dstSize, const void* src, size_t srcSize, const HUF_CElt* CTable, int bmi2)  HUF_compress4X_usingCTable_internal() argument
   1134  HUF_compress4X_usingCTable_bmi2(void* dst, size_t dstSize, const void* src, size_t srcSize, const HUF_CElt* CTable, int bmi2)  HUF_compress4X_usingCTable_bmi2() argument
   1141  HUF_compressCTable_internal(BYTE* const ostart, BYTE* op, BYTE* const oend, const void* src, size_t srcSize, HUF_nbStreams_e nbStreams, const HUF_CElt* CTable, const int bmi2)  HUF_compressCTable_internal() argument
   1175  HUF_compress_internal(void* dst, size_t dstSize, const void* src, size_t srcSize, unsigned maxSymbolValue, unsigned huffLog, HUF_nbStreams_e nbStreams, void* workSpace, size_t wkspSize, HUF_CElt* oldHufTable, HUF_repeat* repeat, int preferRepeat, const int bmi2, unsigned suspectUncompressible)  HUF_compress_internal() argument
   1293  HUF_compress1X_repeat(void* dst, size_t dstSize, const void* src, size_t srcSize, unsigned maxSymbolValue, unsigned huffLog, void* workSpace, size_t wkspSize, HUF_CElt* hufTable, HUF_repeat* repeat, int preferRepeat, int bmi2, unsigned suspectUncompressible)  HUF_compress1X_repeat() argument
   1324  HUF_compress4X_repeat(void* dst, size_t dstSize, const void* src, size_t srcSize, unsigned maxSymbolValue, unsigned huffLog, void* workSpace, size_t wkspSize, HUF_CElt* hufTable, HUF_repeat* repeat, int preferRepeat, int bmi2, unsigned suspectUncompressible)  HUF_compress4X_repeat() argument
   [all...]
zstd_compress_superblock.c
   45  const int bmi2, int writeEntropy, int* entropyWritten)  in ZSTD_compressSubBlock_literal()
   56  (void)bmi2; /* TODO bmi2... */  in ZSTD_compressSubBlock_literal()
   79  /* TODO bmi2 */  in ZSTD_compressSubBlock_literal()
   165  const int bmi2, int writeEntropy, int* entropyWritten)  in ZSTD_compressSubBlock_sequences()
   213  longOffsets, bmi2);  in ZSTD_compressSubBlock_sequences()
   266  const int bmi2,  in ZSTD_compressSubBlock()
   278  op, oend-op, bmi2, writeLitEntropy, litEntropyWritten);  in ZSTD_compressSubBlock()
   289  bmi2, writeSeqEntropy, seqEntropyWritten);  in ZSTD_compressSubBlock()
   432  const int bmi2, U3  in ZSTD_compressSubBlock_multi()
   41  ZSTD_compressSubBlock_literal(const HUF_CElt* hufTable, const ZSTD_hufCTablesMetadata_t* hufMetadata, const BYTE* literals, size_t litSize, void* dst, size_t dstSize, const int bmi2, int writeEntropy, int* entropyWritten)  ZSTD_compressSubBlock_literal() argument
   159  ZSTD_compressSubBlock_sequences(const ZSTD_fseCTables_t* fseTables, const ZSTD_fseCTablesMetadata_t* fseMetadata, const seqDef* sequences, size_t nbSeq, const BYTE* llCode, const BYTE* mlCode, const BYTE* ofCode, const ZSTD_CCtx_params* cctxParams, void* dst, size_t dstCapacity, const int bmi2, int writeEntropy, int* entropyWritten)  ZSTD_compressSubBlock_sequences() argument
   259  ZSTD_compressSubBlock(const ZSTD_entropyCTables_t* entropy, const ZSTD_entropyCTablesMetadata_t* entropyMetadata, const seqDef* sequences, size_t nbSeq, const BYTE* literals, size_t litSize, const BYTE* llCode, const BYTE* mlCode, const BYTE* ofCode, const ZSTD_CCtx_params* cctxParams, void* dst, size_t dstCapacity, const int bmi2, int writeLitEntropy, int writeSeqEntropy, int* litEntropyWritten, int* seqEntropyWritten, U32 lastBlock)  ZSTD_compressSubBlock() argument
   425  ZSTD_compressSubBlock_multi(const seqStore_t* seqStorePtr, const ZSTD_compressedBlockState_t* prevCBlock, ZSTD_compressedBlockState_t* nextCBlock, const ZSTD_entropyCTablesMetadata_t* entropyMetadata, const ZSTD_CCtx_params* cctxParams, void* dst, size_t dstCapacity, const void* src, size_t srcSize, const int bmi2, U32 lastBlock, void* workspace, size_t wkspSize)  ZSTD_compressSubBlock_multi() argument
   [all...]
zstd_compress_literals.c
   76  const int bmi2,  in ZSTD_compressLiterals()
   109  (HUF_CElt*)nextHuf->CTable, &repeat, preferRepeat, bmi2, suspectUncompressible) :  in ZSTD_compressLiterals()
   113  (HUF_CElt*)nextHuf->CTable, &repeat, preferRepeat, bmi2, suspectUncompressible);  in ZSTD_compressLiterals()
   70  ZSTD_compressLiterals(ZSTD_hufCTables_t const* prevHuf, ZSTD_hufCTables_t* nextHuf, ZSTD_strategy strategy, int disableLiteralCompression, void* dst, size_t dstCapacity, const void* src, size_t srcSize, void* entropyWorkspace, size_t entropyWorkspaceSize, const int bmi2, unsigned suspectUncompressible)  ZSTD_compressLiterals() argument
zstd_compress_literals.h
   28  const int bmi2,
zstd_compress_sequences.h
   45  seqDef const* sequences, size_t nbSeq, int longOffsets, int bmi2);
zstd_compress_sequences.c
   424  seqDef const* sequences, size_t nbSeq, int longOffsets, int bmi2)  in ZSTD_encodeSequences()
   428  if (bmi2) {  in ZSTD_encodeSequences()
   436  (void)bmi2;  in ZSTD_encodeSequences()
   419  ZSTD_encodeSequences(void* dst, size_t dstCapacity, FSE_CTable const* CTable_MatchLength, BYTE const* mlCodeTable, FSE_CTable const* CTable_OffsetBits, BYTE const* ofCodeTable, FSE_CTable const* CTable_LitLength, BYTE const* llCodeTable, seqDef const* sequences, size_t nbSeq, int longOffsets, int bmi2)  ZSTD_encodeSequences() argument
zstd_compress.c
   99  cctx->bmi2 = ZSTD_cpuSupportsBmi2();  in ZSTD_initCCtx()
   138  cctx->bmi2 = ZSTD_cpuid_bmi2(ZSTD_cpuid());  in ZSTD_initStaticCCtx()
   2489  const int bmi2)  in ZSTD_entropyCompressSeqStore_internal()
   2528  bmi2, suspectUncompressible);  in ZSTD_entropyCompressSeqStore_internal()
   2575  longOffsets, bmi2);  in ZSTD_entropyCompressSeqStore_internal()
   2608  int bmi2)  in ZSTD_entropyCompressSeqStore()
   2613  entropyWorkspace, entropyWkspSize, bmi2);  in ZSTD_entropyCompressSeqStore()
   3432  zc->bmi2);  in ZSTD_compressSeqStore_singleBlock()
   3706  zc->bmi2);  in ZSTD_compressBlock_internal()
   5860  cctx->bmi2);  in ZSTD_compressSequences_internal()
   2483  ZSTD_entropyCompressSeqStore_internal(seqStore_t* seqStorePtr, const ZSTD_entropyCTables_t* prevEntropy, ZSTD_entropyCTables_t* nextEntropy, const ZSTD_CCtx_params* cctxParams, void* dst, size_t dstCapacity, void* entropyWorkspace, size_t entropyWkspSize, const int bmi2)  ZSTD_entropyCompressSeqStore_internal() argument
   2601  ZSTD_entropyCompressSeqStore(seqStore_t* seqStorePtr, const ZSTD_entropyCTables_t* prevEntropy, ZSTD_entropyCTables_t* nextEntropy, const ZSTD_CCtx_params* cctxParams, void* dst, size_t dstCapacity, size_t srcSize, void* entropyWorkspace, size_t entropyWkspSize, int bmi2)  ZSTD_entropyCompressSeqStore() argument
   [all...]
zstd_compress_internal.h
   361  int bmi2;  /* == 1 if the CPU supports BMI2 and 0 otherwise. CPU support is determined dynamically once per context lifetime. */  member
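zstd_compress.c and zstd_compress_internal.h show the flag being detected once at context creation (cctx->bmi2 = ZSTD_cpuSupportsBmi2();) and then threaded into every entropy call instead of re-probing CPUID per block. A sketch of that caching pattern, with hypothetical ctx_t / entropy_encode names (has_bmi2 is the CPUID probe sketched earlier):

    #include <stddef.h>

    /* hypothetical entropy worker that takes the cached flag */
    size_t entropy_encode(void* dst, size_t dstCap,
                          const void* src, size_t srcSize, int bmi2);
    int has_bmi2(void);  /* CPUID probe, see the cpu.h sketch above */

    typedef struct {
        int bmi2;  /* == 1 if the CPU supports BMI2; set once at init */
        /* ... other per-context state ... */
    } ctx_t;

    void ctx_init(ctx_t* ctx)
    {
        ctx->bmi2 = has_bmi2();  /* detect once per context lifetime */
    }

    size_t ctx_compress_block(ctx_t* ctx, void* dst, size_t dstCap,
                              const void* src, size_t srcSize)
    {
        /* every downstream entropy routine receives the cached flag */
        return entropy_encode(dst, dstCap, src, srcSize, ctx->bmi2);
    }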
/kernel/linux/linux-6.6/lib/zstd/decompress/
huf_decompress.c
   104  size_t cSrcSize, HUF_DTable const* DTable, int bmi2) \
   106  if (bmi2) { \
   116  size_t cSrcSize, HUF_DTable const* DTable, int bmi2) \
   118  (void)bmi2; \
   335  return HUF_readDTableX1_wksp_bmi2(DTable, src, srcSize, workSpace, wkspSize, /* bmi2 */ 0);  in HUF_readDTableX1_wksp()
   338  size_t HUF_readDTableX1_wksp_bmi2(HUF_DTable* DTable, const void* src, size_t srcSize, void* workSpace, size_t wkspSize, int bmi2)  HUF_readDTableX1_wksp_bmi2() argument
   353  iSize = HUF_readStats_wksp(wksp->huffWeight, HUF_SYMBOLVALUE_MAX + 1, wksp->rankVal, &nbSymbols, &tableLog, src, srcSize, wksp->statsWksp, sizeof(wksp->statsWksp), bmi2);  in HUF_readDTableX1_wksp_bmi2()
   727  size_t cSrcSize, HUF_DTable const* DTable, int bmi2)  in HUF_decompress4X1_usingDTable_internal()
   730  if (bmi2) {  in HUF_decompress4X1_usingDTable_internal()
   738  (void)bmi2;  in HUF_decompress4X1_usingDTable_internal()
   726  HUF_decompress4X1_usingDTable_internal(void* dst, size_t dstSize, void const* cSrc, size_t cSrcSize, HUF_DTable const* DTable, int bmi2)  HUF_decompress4X1_usingDTable_internal() argument
   784  HUF_decompress4X1_DCtx_wksp_bmi2(HUF_DTable* dctx, void* dst, size_t dstSize, const void* cSrc, size_t cSrcSize, void* workSpace, size_t wkspSize, int bmi2)  HUF_decompress4X1_DCtx_wksp_bmi2() argument
   1048  HUF_readDTableX2_wksp_bmi2(HUF_DTable* DTable, const void* src, size_t srcSize, void* workSpace, size_t wkspSize, int bmi2)  HUF_readDTableX2_wksp_bmi2() argument
   1431  HUF_decompress4X2_usingDTable_internal(void* dst, size_t dstSize, void const* cSrc, size_t cSrcSize, HUF_DTable const* DTable, int bmi2)  HUF_decompress4X2_usingDTable_internal() argument
   1491  HUF_decompress4X2_DCtx_wksp_bmi2(HUF_DTable* dctx, void* dst, size_t dstSize, const void* cSrc, size_t cSrcSize, void* workSpace, size_t wkspSize, int bmi2)  HUF_decompress4X2_DCtx_wksp_bmi2() argument
   1671  HUF_decompress1X_usingDTable_bmi2(void* dst, size_t maxDstSize, const void* cSrc, size_t cSrcSize, const HUF_DTable* DTable, int bmi2)  HUF_decompress1X_usingDTable_bmi2() argument
   1689  HUF_decompress1X1_DCtx_wksp_bmi2(HUF_DTable* dctx, void* dst, size_t dstSize, const void* cSrc, size_t cSrcSize, void* workSpace, size_t wkspSize, int bmi2)  HUF_decompress1X1_DCtx_wksp_bmi2() argument
   1702  HUF_decompress4X_usingDTable_bmi2(void* dst, size_t maxDstSize, const void* cSrc, size_t cSrcSize, const HUF_DTable* DTable, int bmi2)  HUF_decompress4X_usingDTable_bmi2() argument
   1719  HUF_decompress4X_hufOnly_wksp_bmi2(HUF_DTable* dctx, void* dst, size_t dstSize, const void* cSrc, size_t cSrcSize, void* workSpace, size_t wkspSize, int bmi2)  HUF_decompress4X_hufOnly_wksp_bmi2() argument
   [all...]
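The backslash-continued hits near the top of huf_decompress.c come from a macro that stamps out two copies of the same decoder body, one default and one built for BMI2, plus the if (bmi2) dispatcher. A simplified sketch of that generator follows; GEN_VARIANTS and decode_stream are illustrative names, and in the real file the shared body is force-inlined so the target attribute actually reaches the hot decode loop.

    #include <stddef.h>

    #if defined(__GNUC__) && defined(__x86_64__)
    #define GEN_VARIANTS(fn)                                              \
        static size_t fn##_default(const void* src, size_t srcSize)      \
        {   return fn##_body(src, srcSize); }                            \
        __attribute__((target("bmi2")))                                   \
        static size_t fn##_bmi2(const void* src, size_t srcSize)         \
        {   return fn##_body(src, srcSize); }                            \
        size_t fn(const void* src, size_t srcSize, int bmi2)             \
        {   return bmi2 ? fn##_bmi2(src, srcSize)                        \
                        : fn##_default(src, srcSize); }
    #else
    #define GEN_VARIANTS(fn)                                              \
        size_t fn(const void* src, size_t srcSize, int bmi2)             \
        {   (void)bmi2; return fn##_body(src, srcSize); }
    #endif

    /* the shared body is written once... */
    static size_t decode_stream_body(const void* src, size_t srcSize)
    {
        (void)src;
        return srcSize;  /* placeholder for the real decode loop */
    }

    /* ...and the macro stamps out decode_stream_default, decode_stream_bmi2,
       and the dispatching decode_stream(). */
    GEN_VARIANTS(decode_stream)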
zstd_decompress_internal.h
   156  int bmi2;  /* == 1 if the CPU supports BMI2 and 0 otherwise. CPU support is determined dynamically once per context lifetime. */  member
   203  return dctx->bmi2;  in ZSTD_DCtx_get_bmi2()
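On the decompress side the flag lives in the DCtx and is read through an accessor (return dctx->bmi2; in ZSTD_DCtx_get_bmi2). One benefit of routing reads through an accessor is that a build without dynamic dispatch can make it a compile-time 0 so every if (bmi2) branch folds away. A sketch of that idea with hypothetical dctx_t / HAVE_DYNAMIC_BMI2 names:

    typedef struct {
        int bmi2;  /* set once when the context is initialized */
        /* ... */
    } dctx_t;

    /* When dynamic dispatch is compiled out, return a constant so the
       compiler can eliminate every if (bmi2) branch at build time. */
    static inline int dctx_get_bmi2(const dctx_t* dctx)
    {
    #ifdef HAVE_DYNAMIC_BMI2
        return dctx->bmi2;
    #else
        (void)dctx;
        return 0;
    #endif
    }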
zstd_decompress_block.h
   65  int bmi2);
zstd_decompress_block.c
   19  #include "../common/cpu.h"  /* bmi2 */
   587  unsigned tableLog, void* wksp, size_t wkspSize, int bmi2)  in ZSTD_buildFSETable()
   590  if (bmi2) {  in ZSTD_buildFSETable()
   596  (void)bmi2;  in ZSTD_buildFSETable()
   611  int bmi2)  in ZSTD_buildSeqTable()
   643  ZSTD_buildFSETable(DTableSpace, norm, max, baseValue, nbAdditionalBits, tableLog, wksp, wkspSize, bmi2);  in ZSTD_buildSeqTable()
   584  ZSTD_buildFSETable(ZSTD_seqSymbol* dt, const short* normalizedCounter, unsigned maxSymbolValue, const U32* baseValue, const U8* nbAdditionalBits, unsigned tableLog, void* wksp, size_t wkspSize, int bmi2)  ZSTD_buildFSETable() argument
   605  ZSTD_buildSeqTable(ZSTD_seqSymbol* DTableSpace, const ZSTD_seqSymbol** DTablePtr, symbolEncodingType_e type, unsigned max, U32 maxLog, const void* src, size_t srcSize, const U32* baseValue, const U8* nbAdditionalBits, const ZSTD_seqSymbol* defaultTable, U32 flagRepeatTable, int ddictIsCold, int nbSeq, U32* wksp, size_t wkspSize, int bmi2)  ZSTD_buildSeqTable() argument
zstd_decompress.c
   257  dctx->bmi2 = ZSTD_cpuSupportsBmi2();  in ZSTD_initDCtx_internal()
   1327  /* bmi2 */ 0);  in ZSTD_loadDEntropy()
   1342  /* bmi2 */ 0);  in ZSTD_loadDEntropy()
   1357  /* bmi2 */ 0);  in ZSTD_loadDEntropy()