/linux-6.15/lib/zstd/compress/
zstd_compress_literals.c
     27  for (u=0; u<srcSize; u++) {    in showHexa()
     31  return srcSize;    in showHexa()
     43  U32 const flSize = 1 + (srcSize>31) + (srcSize>4095);    in ZSTD_noCompressLiterals()
     65  DEBUGLOG(5, "Raw (uncompressed) literals: %u -> %u", (U32)srcSize, (U32)(srcSize + flSize));    in ZSTD_noCompressLiterals()
     66  return srcSize + flSize;    in ZSTD_noCompressLiterals()
     71  assert(srcSize >= 1);    in allBytesIdentical()
     75  for (p=1; p<srcSize; p++) {    in allBytesIdentical()
     85  U32 const flSize = 1 + (srcSize>31) + (srcSize>4095);    in ZSTD_compressRleLiteralsBlock()
    141  size_t const lhSize = 3 + (srcSize >= 1 KB) + (srcSize >= 16 KB);    in ZSTD_compressLiterals()
    143  U32 singleStream = srcSize < 256;    in ZSTD_compressLiterals()
    [all …]
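
The literals-header arithmetic in these hits is self-contained: one flag/length byte when there are at most 31 literals, two up to 4095, three beyond, and a raw literals section then costs srcSize plus that header. A minimal sketch of just that rule (hypothetical helper names, not the kernel functions):

```c
#include <stddef.h>

/* Header-size rule visible in the hits: 1 byte for up to 31 literals,
 * 2 bytes up to 4095, 3 bytes beyond. */
static size_t raw_literals_header_size(size_t srcSize)
{
    return 1 + (srcSize > 31) + (srcSize > 4095);
}

/* A raw (uncompressed) literals section therefore occupies
 * srcSize + header bytes, matching the "return srcSize + flSize"
 * hit in ZSTD_noCompressLiterals(). */
static size_t raw_literals_section_size(size_t srcSize)
{
    return srcSize + raw_literals_header_size(srcSize);
}
```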
|
zstd_lazy.h
     42  void const* src, size_t srcSize);
     45  void const* src, size_t srcSize);
     48  void const* src, size_t srcSize);
     51  void const* src, size_t srcSize);
     54  void const* src, size_t srcSize);
     57  void const* src, size_t srcSize);
     60  void const* src, size_t srcSize);
     63  void const* src, size_t srcSize);
     87  void const* src, size_t srcSize);
     90  void const* src, size_t srcSize);
    [all …]
|
huf_compress.c
    995  int n = (int)srcSize;    in HUF_compress1X_usingCTable_internal_body_loop()
   1049  return ((srcSize * tableLog) >> 3) + 8;    in HUF_tightCompressBound()
   1172  const BYTE* const iend = ip + srcSize;    in HUF_compress4X_usingCTable_internal()
   1224  const void* src, size_t srcSize,    in HUF_compressCTable_internal()
   1272  size_t srcSize,    in HUF_optimalTableLog()
   1332  const void* src, size_t srcSize,    in HUF_compress_internal()
   1348  if (!srcSize) return 0; /* Uncompressed */    in HUF_compress_internal()
   1423  if (hSize + 12ul >= srcSize) { return 0; }    in HUF_compress_internal()
   1430  src, srcSize,    in HUF_compress_internal()
   1435  const void* src, size_t srcSize,    in HUF_compress1X_repeat()
    [all …]
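
The HUF_tightCompressBound() hit is a worst-case output estimate: every input byte costs at most tableLog bits, plus a small constant for flushing and rounding. Restated as a stand-alone sketch (hypothetical name):

```c
#include <stddef.h>

/* Restatement of HUF_tightCompressBound(): each of the srcSize symbols
 * costs at most tableLog bits, so the payload fits in
 * (srcSize * tableLog) / 8 bytes plus a small flush/rounding constant. */
static size_t huf_tight_bound_sketch(size_t srcSize, unsigned tableLog)
{
    return ((srcSize * tableLog) >> 3) + 8;
}
```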
|
hist.h
     32  const void* src, size_t srcSize);
     48  const void* src, size_t srcSize,
     56  const void* src, size_t srcSize);
     64  const void* src, size_t srcSize,
     76  const void* src, size_t srcSize);
     83  void HIST_add(unsigned* count, const void* src, size_t srcSize);
|
zstd_opt.h
     27  void const* src, size_t srcSize);
     30  void const* src, size_t srcSize);
     33  void const* src, size_t srcSize);
     47  void const* src, size_t srcSize);
     50  void const* src, size_t srcSize);
     53  void const* src, size_t srcSize);
     60  void const* src, size_t srcSize);
|
zstd_fast.c
    193  void const* src, size_t srcSize,    in ZSTD_compressBlock_fast_noDict_generic()
    202  const U32 endIndex = (U32)((size_t)(istart - base) + srcSize);    in ZSTD_compressBlock_fast_noDict_generic()
    205  const BYTE* const iend = istart + srcSize;    in ZSTD_compressBlock_fast_noDict_generic()
    444  void const* src, size_t srcSize)    in ZSTD_compressBlock_fast()
    498  const BYTE* const iend = istart + srcSize;    in ZSTD_compressBlock_fast_dictMatchState_generic()
    516  const U32 endIndex = (U32)((size_t)(istart - base) + srcSize);    in ZSTD_compressBlock_fast_dictMatchState_generic()
    687  void const* src, size_t srcSize)    in ZSTD_compressBlock_fast_dictMatchState()
    721  const U32 endIndex = (U32)((size_t)(istart - base) + srcSize);    in ZSTD_compressBlock_fast_extDict_generic()
    729  const BYTE* const iend = istart + srcSize;    in ZSTD_compressBlock_fast_extDict_generic()
    761  return ZSTD_compressBlock_fast(ms, seqStore, rep, src, srcSize);    in ZSTD_compressBlock_fast_extDict_generic()
    [all …]
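
The endIndex/iend hits show the indexing convention shared by the block compressors: byte positions are expressed as U32 offsets from a stable window base pointer, with iend marking the end of the current block. A small sketch of that bookkeeping (illustrative names, not the kernel code):

```c
#include <stddef.h>
#include <stdint.h>

/* Match finders address bytes as U32 indices relative to a fixed window
 * "base" pointer; these are the first and one-past-last indices of the
 * current block, as computed in the hits above. */
static void block_indices_sketch(const uint8_t *base, const uint8_t *src,
                                 size_t srcSize,
                                 uint32_t *beginIndex, uint32_t *endIndex)
{
    const uint8_t *const istart = src;
    const uint8_t *const iend = istart + srcSize;  /* end of the block's data */

    *beginIndex = (uint32_t)(istart - base);
    *endIndex   = (uint32_t)((size_t)(istart - base) + srcSize);
    (void)iend;
}
```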
|
fse_compress.c
    349  static unsigned FSE_minTableLog(size_t srcSize, unsigned maxSymbolValue)    in FSE_minTableLog()
    351  U32 minBitsSrc = ZSTD_highbit32((U32)(srcSize)) + 1;    in FSE_minTableLog()
    360  U32 maxBitsSrc = ZSTD_highbit32((U32)(srcSize - 1)) - minus;    in FSE_optimalTableLog_internal()
    362  U32 minBits = FSE_minTableLog(srcSize, maxSymbolValue);    in FSE_optimalTableLog_internal()
    553  const void* src, size_t srcSize,    in FSE_compress_usingCTable_generic()
    557  const BYTE* const iend = istart + srcSize;    in FSE_compress_usingCTable_generic()
    564  if (srcSize <= 2) return 0;    in FSE_compress_usingCTable_generic()
    570  if (srcSize & 1) {    in FSE_compress_usingCTable_generic()
    581  srcSize -= 2;    in FSE_compress_usingCTable_generic()
    612  const void* src, size_t srcSize,    in FSE_compress_usingCTable()
    [all …]
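
Two of these hits bound the FSE table log from the input size alone: at least highbit32(srcSize) + 1 bits, and at most highbit32(srcSize - 1) minus a small slack, so tables never dwarf the input they describe. A hedged restatement with a portable highbit helper (all names hypothetical; the symbol-count bound used by the real FSE_minTableLog() is not shown in the listing and is omitted):

```c
#include <stddef.h>
#include <stdint.h>

/* Portable stand-in for ZSTD_highbit32(): index of the highest set bit.
 * The real helper requires v != 0. */
static unsigned highbit32_sketch(uint32_t v)
{
    unsigned r = 0;
    while (v >>= 1) r++;
    return r;
}

/* Lower bound on the table log derived from the input size,
 * as in the FSE_minTableLog() hit. */
static unsigned fse_min_bits_src_sketch(size_t srcSize)
{
    return highbit32_sketch((uint32_t)srcSize) + 1;
}

/* Upper bound used by FSE_optimalTableLog_internal(); 'minus' is a small
 * usage-dependent slack whose value is not visible in the listing. */
static unsigned fse_max_bits_src_sketch(size_t srcSize, unsigned minus)
{
    return highbit32_sketch((uint32_t)(srcSize - 1)) - minus;
}
```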
|
zstd_ldm.c
    362  BYTE const* const iend = istart + srcSize;    in ZSTD_ldm_generateSequences_internal()
    374  if (srcSize < minMatchLength)    in ZSTD_ldm_generateSequences_internal()
    533  BYTE const* const iend = istart + srcSize;    in ZSTD_ldm_generateSequences()
    535  size_t const nbChunks = (srcSize / kMaxChunkSize) + ((srcSize % kMaxChunkSize) != 0);    in ZSTD_ldm_generateSequences()
    609  if (srcSize <= seq->litLength) {    in ZSTD_ldm_skipSequences()
    611  seq->litLength -= (U32)srcSize;    in ZSTD_ldm_skipSequences()
    614  srcSize -= seq->litLength;    in ZSTD_ldm_skipSequences()
    616  if (srcSize < seq->matchLength) {    in ZSTD_ldm_skipSequences()
    618  seq->matchLength -= (U32)srcSize;    in ZSTD_ldm_skipSequences()
    628  srcSize -= seq->matchLength;    in ZSTD_ldm_skipSequences()
    [all …]
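
The nbChunks hit is a plain ceiling division: the long-distance matcher processes the input in pieces of at most kMaxChunkSize bytes. Sketch (kMaxChunkSize passed in as a placeholder, since its value is not shown in the listing):

```c
#include <stddef.h>

/* Ceiling division from the nbChunks hit: how many chunks of at most
 * kMaxChunkSize bytes are needed to cover srcSize. kMaxChunkSize must
 * be non-zero. */
static size_t ldm_chunk_count_sketch(size_t srcSize, size_t kMaxChunkSize)
{
    return (srcSize / kMaxChunkSize) + ((srcSize % kMaxChunkSize) != 0);
}
```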
|
zstd_double_fast.c
    108  void const* src, size_t srcSize, U32 const mls /* template */)    in ZSTD_compressBlock_doubleFast_noDict_generic()
    118  const U32 endIndex = (U32)((size_t)(istart - base) + srcSize);    in ZSTD_compressBlock_doubleFast_noDict_generic()
    122  const BYTE* const iend = istart + srcSize;    in ZSTD_compressBlock_doubleFast_noDict_generic()
    331  void const* src, size_t srcSize,    in ZSTD_compressBlock_doubleFast_dictMatchState_generic()
    343  const U32 endIndex = (U32)((size_t)(istart - base) + srcSize);    in ZSTD_compressBlock_doubleFast_dictMatchState_generic()
    347  const BYTE* const iend = istart + srcSize;    in ZSTD_compressBlock_doubleFast_dictMatchState_generic()
    571  void const* src, size_t srcSize)    in ZSTD_compressBlock_doubleFast()
    591  void const* src, size_t srcSize)    in ZSTD_compressBlock_doubleFast_dictMatchState()
    613  void const* src, size_t srcSize,    in ZSTD_compressBlock_doubleFast_extDict_generic()
    624  const BYTE* const iend = istart + srcSize;    in ZSTD_compressBlock_doubleFast_extDict_generic()
    [all …]
|
zstd_compress.c
   1553  if (srcSize == 0) srcSize = ZSTD_CONTENTSIZE_UNKNOWN;    in ZSTD_adjustCParams()
   2972  srcSize,    in ZSTD_entropyCompressSeqStore()
   3104  if (srcSize == 0) {    in ZSTD_postProcessSequenceProducerResult()
   3287  srcSize    in ZSTD_buildSeqStore()
   4043  srcSize,    in ZSTD_compressSeqStore_singleBlock()
   4333  srcSize,    in ZSTD_compressBlock_internal()
   4403  srcSize - ZSTD_minGain(srcSize, zc->appliedParams.cParams.strategy);    in ZSTD_compressBlock_targetCBlockSize_body()
   4511  size_t remaining = srcSize;    in ZSTD_compress_frameChunk()
   6349  input.size = srcSize;    in ZSTD_compressStream2_simpleArgs()
   6742  size_t remaining = srcSize;    in ZSTD_compressSequences_internal()
    [all …]
|
zstd_compress_literals.h
     18  size_t ZSTD_noCompressLiterals (void* dst, size_t dstCapacity, const void* src, size_t srcSize);
     24  …e_t ZSTD_compressRleLiteralsBlock (void* dst, size_t dstCapacity, const void* src, size_t srcSize);
     32  const void* src, size_t srcSize,
|
zstd_fast.h
     23  void const* src, size_t srcSize);
     26  void const* src, size_t srcSize);
     29  void const* src, size_t srcSize);
|
zstd_preSplit.c
     58  addEvents_generic(Fingerprint* fp, const void* src, size_t srcSize, size_t samplingRate, unsigned h…    in addEvents_generic()
     61  size_t limit = srcSize - HASHLENGTH + 1;    in addEvents_generic()
     63  assert(srcSize >= HASHLENGTH);    in addEvents_generic()
     71  recordFingerprint_generic(Fingerprint* fp, const void* src, size_t srcSize, size_t samplingRate, un…    in recordFingerprint_generic()
     75  addEvents_generic(fp, src, srcSize, samplingRate, hashLog);    in recordFingerprint_generic()
     78  typedef void (*RecordEvents_f)(Fingerprint* fp, const void* src, size_t srcSize);
     83  static void FP_RECORD(_rate)(Fingerprint* fp, const void* src, size_t srcSize) \
     85  recordFingerprint_generic(fp, src, srcSize, _rate, _hSize); \
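
These hits outline the pre-split fingerprinting loop: hash HASHLENGTH-byte windows at every samplingRate-th position of the srcSize input and bump a bucket. The sketch below mirrors only that shape; the Fingerprint layout, hash function, and constants are hypothetical stand-ins, not the kernel's:

```c
#include <stddef.h>
#include <stdint.h>

#define HASHLENGTH_SKETCH 8     /* stand-in for the kernel's HASHLENGTH */
#define HASHLOG_SKETCH    10    /* assumed bucket count: 2^10 */

typedef struct { unsigned events[1 << HASHLOG_SKETCH]; } FingerprintSketch;

/* Placeholder hash over HASHLENGTH_SKETCH bytes -- not the kernel's hash. */
static unsigned hash_window_sketch(const uint8_t *p)
{
    unsigned h = 0;
    int i;
    for (i = 0; i < HASHLENGTH_SKETCH; i++)
        h = h * 31 + p[i];
    return h & ((1u << HASHLOG_SKETCH) - 1);
}

/* Shape of addEvents_generic() as suggested by the hits: visit positions
 * 0 .. srcSize-HASHLENGTH in steps of samplingRate, bumping one bucket per
 * visited window. Requires srcSize >= HASHLENGTH (asserted in the source). */
static void add_events_sketch(FingerprintSketch *fp, const void *src,
                              size_t srcSize, size_t samplingRate)
{
    const uint8_t *p = (const uint8_t *)src;
    size_t const limit = srcSize - HASHLENGTH_SKETCH + 1;
    size_t i;
    for (i = 0; i < limit; i += samplingRate)
        fp->events[hash_window_sketch(p + i)]++;
}
```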
|
zstd_lazy.c
   1787  void const* src, size_t srcSize)    in ZSTD_compressBlock_greedy()
   1794  void const* src, size_t srcSize)    in ZSTD_compressBlock_greedy_dictMatchState()
   1801  void const* src, size_t srcSize)    in ZSTD_compressBlock_greedy_dedicatedDictSearch()
   1808  void const* src, size_t srcSize)    in ZSTD_compressBlock_greedy_row()
   1815  void const* src, size_t srcSize)    in ZSTD_compressBlock_greedy_dictMatchState_row()
   1822  void const* src, size_t srcSize)    in ZSTD_compressBlock_greedy_dedicatedDictSearch_row()
   1831  void const* src, size_t srcSize)    in ZSTD_compressBlock_lazy()
   1838  void const* src, size_t srcSize)    in ZSTD_compressBlock_lazy_dictMatchState()
   1845  void const* src, size_t srcSize)    in ZSTD_compressBlock_lazy_dedicatedDictSearch()
   1852  void const* src, size_t srcSize)    in ZSTD_compressBlock_lazy_row()
    [all …]
|
zstd_double_fast.h
     26  void const* src, size_t srcSize);
     29  void const* src, size_t srcSize);
     32  void const* src, size_t srcSize);
|
zstd_ldm.h
     44  ldmParams_t const* params, void const* src, size_t srcSize);
     67  void const* src, size_t srcSize);
     76  void ZSTD_ldm_skipSequences(RawSeqStore_t* rawSeqStore, size_t srcSize,
|
hist.c
     30  void HIST_add(unsigned* count, const void* src, size_t srcSize)    in HIST_add()
     33  const BYTE* const end = ip + srcSize;    in HIST_add()
     41  const void* src, size_t srcSize)    in HIST_count_simple()
     44  const BYTE* const end = ip + srcSize;    in HIST_count_simple()
     49  if (srcSize==0) { *maxSymbolValuePtr = 0; return 0; }    in HIST_count_simple()
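
HIST_add() and HIST_count_simple() are byte histograms over the srcSize input. A minimal sketch of the counting loop (hypothetical name):

```c
#include <stddef.h>
#include <stdint.h>

/* Sketch of the histogram loop behind HIST_add(): one counter per byte
 * value, bumped for each of the srcSize input bytes. HIST_count_simple()
 * additionally returns 0 and clears *maxSymbolValuePtr when srcSize == 0. */
static void hist_add_sketch(unsigned count[256], const void *src, size_t srcSize)
{
    const uint8_t *ip = (const uint8_t *)src;
    const uint8_t *const end = ip + srcSize;

    while (ip < end)
        count[*ip++]++;
}
```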
|
zstd_opt.c
    153  if (srcSize <= ZSTD_PREDEF_THRESHOLD) {    in ZSTD_rescaleFreqs()
   1467  const void* src, size_t srcSize)    in ZSTD_compressBlock_btopt()
   1503  ms->window.base -= srcSize;    in ZSTD_initStats_ultra()
   1504  ms->window.dictLimit += (U32)srcSize;    in ZSTD_initStats_ultra()
   1512  const void* src, size_t srcSize)    in ZSTD_compressBlock_btultra()
   1520  const void* src, size_t srcSize)    in ZSTD_compressBlock_btultra2()
   1533  assert(srcSize <= ZSTD_BLOCKSIZE_MAX);    in ZSTD_compressBlock_btultra2()
   1550  const void* src, size_t srcSize)    in ZSTD_compressBlock_btopt_dictMatchState()
   1557  const void* src, size_t srcSize)    in ZSTD_compressBlock_btopt_extDict()
   1566  const void* src, size_t srcSize)    in ZSTD_compressBlock_btultra_dictMatchState()
    [all …]
|
zstd_compress_internal.h
    572  void const* src, size_t srcSize);
    646  RETURN_ERROR_IF(srcSize + ZSTD_blockHeaderSize > dstCapacity,    in ZSTD_noCompressBlock()
    649  ZSTD_memcpy((BYTE*)dst + ZSTD_blockHeaderSize, src, srcSize);    in ZSTD_noCompressBlock()
    650  return ZSTD_blockHeaderSize + srcSize;    in ZSTD_noCompressBlock()
    674  return (srcSize >> minlog) + 2;    in ZSTD_minGain()
   1347  const void* src, size_t srcSize,    in ZSTD_window_update()
   1353  if (srcSize == 0)    in ZSTD_window_update()
   1371  window->nextSrc = ip + srcSize;    in ZSTD_window_update()
   1373  if ( (ip+srcSize > window->dictBase + window->lowLimit)    in ZSTD_window_update()
   1619  const void* src, size_t srcSize);
    [all …]
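
Two hits here define the raw-block fallback: ZSTD_noCompressBlock() stores the block as a block header plus srcSize bytes, and ZSTD_minGain() sets the margin, (srcSize >> minlog) + 2, that a compressed block must beat to be kept (compare the srcSize - ZSTD_minGain(...) hit in zstd_compress.c above). A hedged sketch of that acceptance test, treating minlog as an opaque, strategy-dependent parameter:

```c
#include <stddef.h>

/* Margin a compressed block must save to be worth keeping, per the
 * ZSTD_minGain() hit; minlog is a strategy-dependent shift. */
static size_t min_gain_sketch(size_t srcSize, unsigned minlog)
{
    return (srcSize >> minlog) + 2;
}

/* Acceptance test implied by the hits: keep the compressed form only if
 * it undercuts srcSize by at least the minimum gain; otherwise emit a
 * raw block (block header + srcSize bytes, as in ZSTD_noCompressBlock()). */
static int keep_compressed_block_sketch(size_t srcSize, size_t cSize, unsigned minlog)
{
    return cSize + min_gain_sketch(srcSize, minlog) <= srcSize;
}
```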
|
/linux-6.15/lib/zstd/decompress/
zstd_decompress.c
    438  if (srcSize > 0) {    in ZSTD_getFrameHeader_advanced()
    442  if (srcSize < minInputSize) {    in ZSTD_getFrameHeader_advanced()
    651  srcSize -= frameSrcSize;    in ZSTD_findDecompressedSize()
    798  while (srcSize > 0) {    in ZSTD_decompressBound()
    806  srcSize -= compressedSize;    in ZSTD_decompressBound()
    818  while (srcSize > 0) {    in ZSTD_decompressionMargin()
    846  srcSize -= compressedSize;    in ZSTD_decompressionMargin()
    876  if (srcSize == 0) return 0;    in ZSTD_copyRawBlock()
    880  return srcSize;    in ZSTD_copyRawBlock()
   1231  ZSTD_memcpy(dctx->headerBuffer + (dctx->headerSize - srcSize), src, srcSize);    in ZSTD_decompressContinue()
    [all …]
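
The ZSTD_decompressBound() hits show a loop that walks concatenated frames, subtracting each frame's compressed size from srcSize until it is exhausted. A hedged sketch of that walk using the public (userspace) zstd API, which the kernel's zstd_lib.h mirrors; error and unknown-content-size handling are simplified here, whereas the real function falls back to a per-block upper bound:

```c
#include <stddef.h>
#include <zstd.h>   /* userspace API; an assumption for this sketch */

/* Walk concatenated frames and sum their declared content sizes. */
static unsigned long long decompressed_bound_sketch(const void *src, size_t srcSize)
{
    const char *ip = (const char *)src;
    unsigned long long bound = 0;

    while (srcSize > 0) {
        size_t const frameSize = ZSTD_findFrameCompressedSize(ip, srcSize);
        unsigned long long const content = ZSTD_getFrameContentSize(ip, srcSize);
        if (ZSTD_isError(frameSize)
            || content == ZSTD_CONTENTSIZE_ERROR
            || content == ZSTD_CONTENTSIZE_UNKNOWN)
            return ZSTD_CONTENTSIZE_ERROR;   /* simplified error path */
        bound += content;
        ip += frameSize;
        srcSize -= frameSize;
    }
    return bound;
}
```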
|
zstd_decompress_block.c
     64  size_t ZSTD_getcBlockSize(const void* src, size_t srcSize,    in ZSTD_getcBlockSize()
     67  RETURN_ERROR_IF(srcSize < ZSTD_blockHeaderSize, srcSize_wrong, "");    in ZSTD_getcBlockSize()
    345  const void* src, size_t srcSize,
    348  const void* src, size_t srcSize,    in ZSTD_decodeLiteralsBlock_wrapper()
    650  const void* src, size_t srcSize,    in ZSTD_buildSeqTable()
    659  RETURN_ERROR_IF(!srcSize, srcSize_wrong, "");    in ZSTD_buildSeqTable()
    697  const void* src, size_t srcSize)    in ZSTD_decodeSeqHeaders()
    700  const BYTE* const iend = istart + srcSize;    in ZSTD_decodeSeqHeaders()
   2089  srcSize -= litCSize;
   2128  srcSize -= seqHSize;
    [all …]
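
ZSTD_getcBlockSize() rejects inputs shorter than the block header. Per the zstd format (RFC 8878) that header is a little-endian 24-bit field: a last-block flag, a 2-bit block type, and a 21-bit block size. A hedged stand-alone parser sketch (struct and names are illustrative, not the kernel's):

```c
#include <stddef.h>
#include <stdint.h>

#define BLOCK_HEADER_SIZE_SKETCH 3   /* ZSTD_blockHeaderSize in the source */

typedef struct {
    int      lastBlock;   /* bit 0: last-block flag */
    unsigned blockType;   /* bits 1-2: raw / RLE / compressed / reserved */
    size_t   blockSize;   /* bits 3-23: block size */
} BlockHeaderSketch;

static int parse_block_header_sketch(const void *src, size_t srcSize,
                                     BlockHeaderSketch *out)
{
    const uint8_t *p = (const uint8_t *)src;
    uint32_t hdr;

    if (srcSize < BLOCK_HEADER_SIZE_SKETCH)
        return -1;   /* mirrors the srcSize_wrong check in the hit above */
    hdr = (uint32_t)p[0] | ((uint32_t)p[1] << 8) | ((uint32_t)p[2] << 16);
    out->lastBlock = hdr & 1;
    out->blockType = (hdr >> 1) & 3;
    out->blockSize = hdr >> 3;
    return 0;
}
```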
|
/linux-6.15/include/linux/
zstd_lib.h
    140  const void* src, size_t srcSize,
    228  …SBOUND(srcSize) (((size_t)(srcSize) >= ZSTD_MAX_INPUT_SIZE) ? 0 : (srcSize) + ((srcSize)>>8) + (…
    273  const void* src, size_t srcSize,
    293  const void* src, size_t srcSize);
    604  const void* src, size_t srcSize);
    945  const void* src, size_t srcSize,
   1561  ZSTDLIB_STATIC_API size_t ZSTD_sequenceBound(size_t srcSize);
   1595  const void* src, size_t srcSize);
   1650  const void* src, size_t srcSize);
   1958  const void* src, size_t srcSize,
    [all …]
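
The partially visible ZSTD_COMPRESSBOUND() hit gives the worst-case compressed size: the input itself, plus roughly 0.4% (srcSize >> 8), plus a small-input margin whose exact expression is cut off in the listing; inputs at or above ZSTD_MAX_INPUT_SIZE map to 0. A hedged restatement with placeholder values for the elided parts:

```c
#include <stddef.h>

/* Placeholder for the real ZSTD_MAX_INPUT_SIZE cutoff. */
#define ZSTD_MAX_INPUT_SIZE_SKETCH ((size_t)1 << 30)

/* Worst case = input + input/256 + a small-input margin; the margin term
 * is elided in the listing, so a flat placeholder is used here.
 * Oversized inputs return 0 as an error signal, as in the visible macro. */
static size_t compress_bound_sketch(size_t srcSize)
{
    size_t const margin = 64;   /* placeholder for the elided term */
    if (srcSize >= ZSTD_MAX_INPUT_SIZE_SKETCH)
        return 0;
    return srcSize + (srcSize >> 8) + margin;
}
```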
|
/linux-6.15/lib/zstd/common/
entropy_common.c
    237  const void* src, size_t srcSize)    in HUF_readStats()
    246  const void* src, size_t srcSize,    in HUF_readStats_body()
    255  if (!srcSize) return ERROR(srcSize_wrong);    in HUF_readStats_body()
    262  if (iSize+1 > srcSize) return ERROR(srcSize_wrong);    in HUF_readStats_body()
    271  if (iSize+1 > srcSize) return ERROR(srcSize_wrong);    in HUF_readStats_body()
    312  const void* src, size_t srcSize,    in HUF_readStats_body_default()
    315  …tats_body(huffWeight, hwSize, rankStats, nbSymbolsPtr, tableLogPtr, src, srcSize, workSpace, wkspS…    in HUF_readStats_body_default()
    321  const void* src, size_t srcSize,    in HUF_readStats_body_bmi2()
    324  …tats_body(huffWeight, hwSize, rankStats, nbSymbolsPtr, tableLogPtr, src, srcSize, workSpace, wkspS…    in HUF_readStats_body_bmi2()
    330  const void* src, size_t srcSize,    in HUF_readStats_wksp()
    [all …]
|
huf.h
    133  unsigned HUF_optimalTableLog(unsigned maxTableLog, size_t srcSize, unsigned maxSymbolValue, void* w…
    136  size_t HUF_compress4X_usingCTable(void* dst, size_t dstSize, const void* src, size_t srcSize, const…
    153  const void* src, size_t srcSize,
    175  const void* src, size_t srcSize);
    186  const void* src, size_t srcSize,
    192  … (HUF_CElt* CTable, unsigned* maxSymbolValuePtr, const void* src, size_t srcSize, unsigned *hasZer…
    244  size_t HUF_compress1X_usingCTable(void* dst, size_t dstSize, const void* src, size_t srcSize, const…
    252  const void* src, size_t srcSize,
    272  size_t HUF_readDTableX1_wksp(HUF_DTable* DTable, const void* src, size_t srcSize, void* workSpace, …
    275  size_t HUF_readDTableX2_wksp(HUF_DTable* DTable, const void* src, size_t srcSize, void* workSpace, …
|
bitstream.h
     98  MEM_STATIC size_t BIT_initDStream(BIT_DStream_t* bitD, const void* srcBuffer, size_t srcSize);
    239  MEM_STATIC size_t BIT_initDStream(BIT_DStream_t* bitD, const void* srcBuffer, size_t srcSize)    in BIT_initDStream()
    241  if (srcSize < 1) { ZSTD_memset(bitD, 0, sizeof(*bitD)); return ERROR(srcSize_wrong); }    in BIT_initDStream()
    246  if (srcSize >= sizeof(bitD->bitContainer)) { /* normal case */    in BIT_initDStream()
    247  bitD->ptr = (const char*)srcBuffer + srcSize - sizeof(bitD->bitContainer);    in BIT_initDStream()
    249  { BYTE const lastByte = ((const BYTE*)srcBuffer)[srcSize-1];    in BIT_initDStream()
    255  switch(srcSize)    in BIT_initDStream()
    277  { BYTE const lastByte = ((const BYTE*)srcBuffer)[srcSize-1];    in BIT_initDStream()
    281  bitD->bitsConsumed += (U32)(sizeof(bitD->bitContainer) - srcSize)*8;    in BIT_initDStream()
    284  return srcSize;    in BIT_initDStream()
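
The BIT_initDStream() hits split initialization into a normal case (at least one full bit container available at the tail of the buffer) and a short-input case, where only srcSize bytes are loaded and the missing bits are pre-marked as consumed. The sketch below keeps that split but drops the sentinel-bit search in the last byte and loads bytes in simple little-endian order, so it is illustrative only:

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

typedef struct {
    uint64_t    bitContainer;
    unsigned    bitsConsumed;
    const char *ptr;
    const char *start;
} BitDStreamSketch;

/* Simplified init: returns srcSize on success, (size_t)-1 on error
 * (the real code returns ERROR(srcSize_wrong)). Assumes little-endian. */
static size_t bit_init_dstream_sketch(BitDStreamSketch *bitD,
                                      const void *srcBuffer, size_t srcSize)
{
    const uint8_t *in = (const uint8_t *)srcBuffer;

    if (srcSize < 1) { memset(bitD, 0, sizeof(*bitD)); return (size_t)-1; }
    bitD->start = (const char *)srcBuffer;
    bitD->bitsConsumed = 0;

    if (srcSize >= sizeof(bitD->bitContainer)) {   /* normal case: fill from the tail */
        bitD->ptr = (const char *)srcBuffer + srcSize - sizeof(bitD->bitContainer);
        memcpy(&bitD->bitContainer, bitD->ptr, sizeof(bitD->bitContainer));
    } else {                                       /* short input: partial fill */
        size_t i;
        bitD->ptr = bitD->start;
        bitD->bitContainer = 0;
        for (i = 0; i < srcSize; i++)
            bitD->bitContainer |= (uint64_t)in[i] << (8 * i);
        /* the bytes that could not be loaded count as already consumed,
         * matching the bitsConsumed adjustment visible in the hits */
        bitD->bitsConsumed = (unsigned)(sizeof(bitD->bitContainer) - srcSize) * 8;
    }
    return srcSize;
}
```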
|