/lib/lz4/
lz4_compress.c
  100 const BYTE **hashTable = (const BYTE **)tableBase;  in LZ4_putPositionOnHash()
  140 const BYTE **hashTable = (const BYTE **) tableBase;  in LZ4_getPositionOnHash()
  187 const BYTE *ip = (const BYTE *) source;  in LZ4_compress_generic()
  194 const BYTE *anchor = (const BYTE *) source;  in LZ4_compress_generic()
  199 BYTE *op = (BYTE *) dest;  in LZ4_compress_generic()
  529 const BYTE *ip = (const BYTE *) src;  in LZ4_compress_destSize_generic()
  530 const BYTE *base = (const BYTE *) src;  in LZ4_compress_destSize_generic()
  531 const BYTE *lowLimit = (const BYTE *) src;  in LZ4_compress_destSize_generic()
  537 BYTE *op = (BYTE *) dst;  in LZ4_compress_destSize_generic()
  778 const BYTE *p = (const BYTE *)dictionary;  in LZ4_loadDict()
  [all …]
|
lz4hc_compress.c
  74 const BYTE *ip)  in LZ4HC_Insert()
  100 const BYTE *ip,  in LZ4HC_InsertAndFindBestMatch()
  262 const BYTE **ip,  in LZ4HC_encodeSequence()
  263 BYTE **op,  in LZ4HC_encodeSequence()
  268 BYTE *oend)  in LZ4HC_encodeSequence()
  271 BYTE *token;  in LZ4HC_encodeSequence()
  347 const BYTE *ip = (const BYTE *) source;  in LZ4HC_compress_generic()
  353 BYTE *op = (BYTE *) dest;  in LZ4HC_compress_generic()
  363 const BYTE *start0;  in LZ4HC_compress_generic()
  364 const BYTE *ref0;  in LZ4HC_compress_generic()
  [all …]
|
lz4_decompress.c
  75 const BYTE * const lowPrefix,  in LZ4_decompress_generic()
  77 const BYTE * const dictStart,  in LZ4_decompress_generic()
  82 const BYTE *ip = (const BYTE *) src;  in LZ4_decompress_generic()
  85 BYTE *op = (BYTE *) dst;  in LZ4_decompress_generic()
  86 BYTE * const oend = op + outputSize;  in LZ4_decompress_generic()
  87 BYTE *cpy;  in LZ4_decompress_generic()
  89 const BYTE * const dictEnd = (const BYTE *)dictStart + dictSize;  in LZ4_decompress_generic()
  97 const BYTE *const shortiend = iend -  in LZ4_decompress_generic()
  99 const BYTE *const shortoend = oend -  in LZ4_decompress_generic()
  122 const BYTE *match;  in LZ4_decompress_generic()
  [all …]
|
lz4defs.h
  51 typedef uint8_t BYTE;  typedef
  175 BYTE *d = (BYTE *)dstPtr;  in LZ4_wildCopy()
  176 const BYTE *s = (const BYTE *)srcPtr;  in LZ4_wildCopy()
  177 BYTE *const e = (BYTE *)dstEnd;  in LZ4_wildCopy()
  196 const BYTE *pIn,  in LZ4_count()
  197 const BYTE *pMatch,  in LZ4_count()
  198 const BYTE *pInLimit)  in LZ4_count()
  200 const BYTE *const pStart = pIn;  in LZ4_count()
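
The lz4defs.h hits above show that BYTE is just uint8_t and that LZ4_wildCopy() copies in fixed-size chunks, deliberately running past the requested end into caller-provided slack. A minimal self-contained sketch of that idiom, assuming an 8-byte step and a plain memcpy (the name wildCopy_sketch and the step size are illustrative, not the kernel's exact code):

    #include <stdint.h>
    #include <string.h>

    typedef uint8_t BYTE;   /* as in lz4defs.h line 51 */

    /*
     * Copy at least (dstEnd - dstPtr) bytes in 8-byte steps; the destination
     * is assumed to have enough slack that writing a few bytes past dstEnd
     * is safe.  Sketch of the wild-copy idiom, not the kernel source.
     */
    static void wildCopy_sketch(void *dstPtr, const void *srcPtr, void *dstEnd)
    {
        BYTE *d = (BYTE *)dstPtr;
        const BYTE *s = (const BYTE *)srcPtr;
        BYTE *const e = (BYTE *)dstEnd;

        do {
            memcpy(d, s, 8);        /* may write up to 7 bytes beyond e */
            d += 8;
            s += 8;
        } while (d < e);
    }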
|
/lib/zstd/compress/
hist.c
  32 const BYTE* ip = (const BYTE*)src;  in HIST_add()
  33 const BYTE* const end = ip + srcSize;  in HIST_add()
  43 const BYTE* ip = (const BYTE*)src;  in HIST_count_simple()
  44 const BYTE* const end = ip + srcSize;  in HIST_count_simple()
  83 const BYTE* ip = (const BYTE*)source;  in HIST_count_parallel_wksp()
  105 Counting1[(BYTE) c ]++;  in HIST_count_parallel_wksp()
  106 Counting2[(BYTE)(c>>8) ]++;  in HIST_count_parallel_wksp()
  107 Counting3[(BYTE)(c>>16)]++;  in HIST_count_parallel_wksp()
  110 Counting1[(BYTE) c ]++;  in HIST_count_parallel_wksp()
  111 Counting2[(BYTE)(c>>8) ]++;  in HIST_count_parallel_wksp()
  [all …]
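
The Counting1..Counting4 lines in HIST_count_parallel_wksp() are the byte-sliced histogram trick: read a 32-bit word and bump four independent count tables, one per byte lane, so consecutive increments rarely hit the same counter; the lanes are summed at the end. A self-contained sketch of the idea (hist_sketch and the simplified tail handling are assumptions; the real routine unrolls further and validates the symbol range):

    #include <stdint.h>
    #include <stddef.h>
    #include <string.h>

    typedef uint8_t BYTE;

    /* Count byte frequencies four lanes at a time, then merge the lanes. */
    static void hist_sketch(unsigned count[256], const void *src, size_t srcSize)
    {
        const BYTE *ip = (const BYTE *)src;
        const BYTE *const end = ip + srcSize;
        unsigned c1[256] = {0}, c2[256] = {0}, c3[256] = {0}, c4[256] = {0};
        int i;

        while (end - ip >= 4) {
            uint32_t c;
            memcpy(&c, ip, 4);              /* unaligned 32-bit read */
            c1[(BYTE) c        ]++;
            c2[(BYTE)(c >>  8) ]++;
            c3[(BYTE)(c >> 16) ]++;
            c4[(BYTE)(c >> 24) ]++;
            ip += 4;
        }
        while (ip < end)                    /* trailing 0..3 bytes */
            c1[*ip++]++;

        for (i = 0; i < 256; i++)
            count[i] = c1[i] + c2[i] + c3[i] + c4[i];
    }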
|
zstd_compress_superblock.c
  51 BYTE* const ostart = (BYTE*)dst;  in ZSTD_compressSubBlock_literal()
  167 const BYTE* llCode, const BYTE* mlCode, const BYTE* ofCode,  in ZSTD_compressSubBlock_sequences()
  173 BYTE* const ostart = (BYTE*)dst;  in ZSTD_compressSubBlock_sequences()
  175 BYTE* op = ostart;  in ZSTD_compressSubBlock_sequences()
  176 BYTE* seqHead;  in ZSTD_compressSubBlock_sequences()
  187 op[0] = (BYTE)((nbSeq>>8) + 0x80), op[1] = (BYTE)nbSeq, op+=2;  in ZSTD_compressSubBlock_sequences()
  268 const BYTE* llCode, const BYTE* mlCode, const BYTE* ofCode,  in ZSTD_compressSubBlock()
  276 BYTE* const ostart = (BYTE*)dst;  in ZSTD_compressSubBlock()
  498 BYTE const* ip = (BYTE const*)src;  in ZSTD_compressSubBlock_multi()
  500 BYTE* const ostart = (BYTE*)dst;  in ZSTD_compressSubBlock_multi()
  [all …]
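
Line 187 is the two-byte branch of the Number_of_Sequences encoding from the zstd format (RFC 8878): counts below 128 take one byte, counts below 0x7F00 take two with the high byte offset by 0x80, and larger counts take an 0xFF marker plus a little-endian 16-bit remainder. A hedged sketch of that header write (writeNbSeq_sketch is illustrative, not the kernel function):

    #include <stdint.h>
    #include <stddef.h>

    typedef uint8_t BYTE;
    #define LONGNBSEQ 0x7F00

    /* Encode the Number_of_Sequences field of a zstd Sequences section.
     * Returns the number of header bytes written.  Sketch only. */
    static size_t writeNbSeq_sketch(BYTE *op, size_t nbSeq)
    {
        if (nbSeq < 128) {
            op[0] = (BYTE)nbSeq;
            return 1;
        }
        if (nbSeq < LONGNBSEQ) {
            op[0] = (BYTE)((nbSeq >> 8) + 0x80);    /* as on line 187 above */
            op[1] = (BYTE)nbSeq;
            return 2;
        }
        op[0] = 0xFF;
        op[1] = (BYTE)((nbSeq - LONGNBSEQ) & 0xFF); /* little-endian 16 bits */
        op[2] = (BYTE)((nbSeq - LONGNBSEQ) >> 8);
        return 3;
    }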
|
zstd_fast.c
  27 const BYTE* const iend = ((const BYTE*)end) - HASH_READ_SIZE;  in ZSTD_fillHashTableForCDict()
  64 const BYTE* const iend = ((const BYTE*)end) - HASH_READ_SIZE;  in ZSTD_fillHashTableForCCtx()
  201 const BYTE* const istart = (const BYTE*)src;  in ZSTD_compressBlock_fast_noDict_generic()
  210 const BYTE* ip1;  in ZSTD_compressBlock_fast_noDict_generic()
  211 const BYTE* ip2;  in ZSTD_compressBlock_fast_noDict_generic()
  212 const BYTE* ip3;  in ZSTD_compressBlock_fast_noDict_generic()
  492 const BYTE* const istart = (const BYTE*)src;  in ZSTD_compressBlock_fast_dictMatchState_generic()
  719 const BYTE* const istart = (const BYTE*)src;  in ZSTD_compressBlock_fast_extDict_generic()
  735 const BYTE* ip1;  in ZSTD_compressBlock_fast_extDict_generic()
  736 const BYTE* ip2;  in ZSTD_compressBlock_fast_extDict_generic()
  [all …]
|
zstd_double_fast.c
  30 const BYTE* const iend = ((const BYTE*)end) - HASH_READ_SIZE;  in ZSTD_fillDoubleHashTableForCDict()
  68 const BYTE* const iend = ((const BYTE*)end) - HASH_READ_SIZE;  in ZSTD_fillDoubleHashTableForCCtx()
  116 const BYTE* const istart = (const BYTE*)src;  in ZSTD_compressBlock_doubleFast_noDict_generic()
  117 const BYTE* anchor = istart;  in ZSTD_compressBlock_doubleFast_noDict_generic()
  134 const BYTE* nextStep;  in ZSTD_compressBlock_doubleFast_noDict_generic()
  340 const BYTE* const istart = (const BYTE*)src;  in ZSTD_compressBlock_doubleFast_dictMatchState_generic()
  341 const BYTE* ip = istart;  in ZSTD_compressBlock_doubleFast_dictMatchState_generic()
  342 const BYTE* anchor = istart;  in ZSTD_compressBlock_doubleFast_dictMatchState_generic()
  621 const BYTE* const istart = (const BYTE*)src;  in ZSTD_compressBlock_doubleFast_extDict_generic()
  622 const BYTE* ip = istart;  in ZSTD_compressBlock_doubleFast_extDict_generic()
  [all …]
|
zstd_ldm.c
  212 const BYTE* pIn, const BYTE* pAnchor,  in ZSTD_ldm_countBackwardsMatch()
  213 const BYTE* pMatch, const BYTE* pMatchBase)  in ZSTD_ldm_countBackwardsMatch()
  230 const BYTE* pIn, const BYTE* pAnchor,  in ZSTD_ldm_countBackwardsMatch_2segments()
  231 const BYTE* pMatch, const BYTE* pMatchBase,  in ZSTD_ldm_countBackwardsMatch_2segments()
  232 const BYTE* pExtDictStart, const BYTE* pExtDictEnd)  in ZSTD_ldm_countBackwardsMatch_2segments()
  255 const BYTE* const iend = (const BYTE*)end;  in ZSTD_ldm_fillFastTables()
  361 BYTE const* const istart = (BYTE const*)src;  in ZSTD_ldm_generateSequences_internal()
  366 BYTE const* ip = istart;  in ZSTD_ldm_generateSequences_internal()
  532 BYTE const* const istart = (BYTE const*)src;  in ZSTD_ldm_generateSequences()
  692 BYTE const* const istart = (BYTE const*)src;  in ZSTD_ldm_blockCompress()
  [all …]
|
zstd_compress_literals.c
  25 const BYTE* const ip = (const BYTE*)src;  in showHexa()
  42 BYTE* const ostart = (BYTE*)dst;  in ZSTD_noCompressLiterals()
  52 ostart[0] = (BYTE)((U32)set_basic + (srcSize<<3));  in ZSTD_noCompressLiterals()
  73 { const BYTE b = ((const BYTE*)src)[0];  in allBytesIdentical()
  76 if (((const BYTE*)src)[p] != b) return 0;  in allBytesIdentical()
  84 BYTE* const ostart = (BYTE*)dst;  in ZSTD_compressRleLiteralsBlock()
  93 ostart[0] = (BYTE)((U32)set_rle + (srcSize<<3));  in ZSTD_compressRleLiteralsBlock()
  105 ostart[flSize] = *(const BYTE*)src;  in ZSTD_compressRleLiteralsBlock()
  106 …DEBUGLOG(5, "RLE : Repeated Literal (%02X: %u times) -> %u bytes encoded", ((const BYTE*)src)[0], …  in ZSTD_compressRleLiteralsBlock()
  142 BYTE* const ostart = (BYTE*)dst;  in ZSTD_compressLiterals()
  [all …]
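
allBytesIdentical() above decides whether a literals block can be emitted as a one-byte RLE block instead of being stored raw. A sketch reconstructed from the two lines shown (srcSize is assumed to be at least 1; the name allBytesIdentical_sketch is illustrative):

    #include <stddef.h>
    #include <stdint.h>

    typedef uint8_t BYTE;

    /* Return 1 if every byte of src equals the first byte, 0 otherwise. */
    static int allBytesIdentical_sketch(const void *src, size_t srcSize)
    {
        const BYTE b = ((const BYTE *)src)[0];
        size_t p;

        for (p = 1; p < srcSize; p++) {
            if (((const BYTE *)src)[p] != b)
                return 0;
        }
        return 1;
    }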
|
zstd_lazy.c
  31 const BYTE* ip, const BYTE* iend,  in ZSTD_updateDUBT()
  92 const BYTE* match;  in ZSTD_insertDUBT1()
  167 const BYTE* const ip, const BYTE* const iend,  in ZSTD_DUBT_findBetterDictMatch()
  245 const BYTE* const ip, const BYTE* const iend,  in ZSTD_DUBT_findBestMatch()
  397 const BYTE* const ip, const BYTE* const iLimit,  in ZSTD_BtFindBestMatch()
  670 const BYTE* const ip, const BYTE* const iLimit,  in ZSTD_HcFindBestMatch()
  1144 const BYTE* const ip, const BYTE* const iLimit,  in ZSTD_RowFindBestMatch()
  1222 BYTE* tagRow = (BYTE*)(tagTable + relRow);  in ZSTD_RowFindBestMatch()
  1490 const BYTE* ip,  in ZSTD_searchMax()
  1524 const BYTE* const istart = (const BYTE*)src;  in ZSTD_compressBlock_lazy_generic()
  [all …]
|
zstd_compress_internal.h
  99 BYTE* litStart;
  101 BYTE* llCode;
  102 BYTE* mlCode;
  103 BYTE* ofCode;
  656 BYTE* const op = (BYTE*)dst;  in ZSTD_rleCompressBlock()
  698 ZSTD_safecopyLiterals(BYTE* op, BYTE const* ip, BYTE const* const iend, BYTE const* ilimit_w)  in ZSTD_safecopyLiterals()
  846 MEM_STATIC size_t ZSTD_count(const BYTE* pIn, const BYTE* pMatch, const BYTE* const pInLimit)  in ZSTD_count()
  872 ZSTD_count_2segments(const BYTE* ip, const BYTE* match,  in ZSTD_count_2segments()
  873 const BYTE* iEnd, const BYTE* mEnd, const BYTE* iStart)  in ZSTD_count_2segments()
  978 BYTE const* istart = (BYTE const*)buf;  in ZSTD_rollingHash_append()
  [all …]
|
zstd_compress_sequences.c
  74 BYTE wksp[FSE_NCOUNTBOUND];  in ZSTD_NCountCost()
  247 const BYTE* codeTable, size_t nbSeq,  in ZSTD_buildCTable()
  252 BYTE* op = (BYTE*)dst;  in ZSTD_buildCTable()
  253 const BYTE* const oend = op + dstCapacity;  in ZSTD_buildCTable()
  258 FORWARD_IF_ERROR(FSE_buildCTable_rle(nextCTable, (BYTE)max), "");  in ZSTD_buildCTable()
  296 FSE_CTable const* CTable_LitLength, BYTE const* llCodeTable,  in ZSTD_encodeSequences_body()
  335 BYTE const llCode = llCodeTable[n];  in ZSTD_encodeSequences_body()
  336 BYTE const ofCode = ofCodeTable[n];  in ZSTD_encodeSequences_body()
  337 BYTE const mlCode = mlCodeTable[n];  in ZSTD_encodeSequences_body()
  390 FSE_CTable const* CTable_LitLength, BYTE const* llCodeTable,  in ZSTD_encodeSequences_default()
  [all …]
|
huf_compress.c
  48 BYTE byte;
  49 BYTE nbBits;
  115 BYTE* const aligned = (BYTE*)workspace + add;  in HUF_alignUpWorkspace()
  149 BYTE* const ostart = (BYTE*) dst;  in HUF_compressWeights()
  251 BYTE* op = (BYTE*)dst;  in HUF_writeCTable_wksp()
  1060 const BYTE* ip = (const BYTE*) src;  in HUF_compress1X_usingCTable_internal_body()
  1061 BYTE* const ostart = (BYTE*)dst;  in HUF_compress1X_usingCTable_internal_body()
  1171 const BYTE* ip = (const BYTE*) src;  in HUF_compress4X_usingCTable_internal()
  1173 BYTE* const ostart = (BYTE*) dst;  in HUF_compress4X_usingCTable_internal()
  1223 BYTE* const ostart, BYTE* op, BYTE* const oend,  in HUF_compressCTable_internal()
  [all …]
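
HUF_alignUpWorkspace() rounds the caller-supplied workspace pointer up to an alignment boundary before carving BYTE-addressed scratch out of it. A sketch of that step, assuming a power-of-two alignment and enough slack in the workspace (alignUpWorkspace_sketch is illustrative, not the kernel's exact signature):

    #include <stddef.h>
    #include <stdint.h>

    typedef uint8_t BYTE;

    /* Round workspace up to an align-byte boundary and shrink the reported
     * size by the number of bytes skipped.  align must be a power of two. */
    static void *alignUpWorkspace_sketch(void *workspace, size_t *workspaceSizePtr,
                                         size_t align)
    {
        size_t const mask = align - 1;
        size_t const rem  = (size_t)(uintptr_t)workspace & mask;
        size_t const add  = (align - rem) & mask;
        BYTE *const aligned = (BYTE *)workspace + add;

        *workspaceSizePtr -= add;   /* caller guarantees at least align-1 bytes of slack */
        return aligned;
    }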
|
zstd_cwksp.h
  166 BYTE allocFailed;
  277 void* const alloc = (BYTE*)ws->allocStart - bytes;  in ZSTD_cwksp_reserve_internal_buffer_space()
  316 void *const objectEnd = (BYTE *) alloc + bytesToAlign;  in ZSTD_cwksp_internal_advance_phase()
  426 end = (BYTE *)alloc + bytes;  in ZSTD_cwksp_reserve_table()
  455 void* end = (BYTE*)alloc + roundedBytes;  in ZSTD_cwksp_reserve_object()
  521 ZSTD_memset(ws->tableValidEnd, 0, (size_t)((BYTE*)ws->tableEnd - (BYTE*)ws->tableValidEnd));  in ZSTD_cwksp_clean_tables()
  558 return (size_t)((BYTE*)ws->workspaceEnd - (BYTE*)ws->workspace);  in ZSTD_cwksp_sizeof()
  562 return (size_t)((BYTE*)ws->tableEnd - (BYTE*)ws->workspace)  in ZSTD_cwksp_used()
  563 + (size_t)((BYTE*)ws->workspaceEnd - (BYTE*)ws->allocStart);  in ZSTD_cwksp_used()
  575 ws->workspaceEnd = (BYTE*)start + size;  in ZSTD_cwksp_init()
  [all …]
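
The zstd_cwksp.h hits are all BYTE-pointer arithmetic on a single preallocated arena: buffers are reserved by moving allocStart down from the back, and sizes are reported by subtracting BYTE casts of the cursors. A toy model of that bump arena (cwksp_sketch and the two helpers are illustrative; the real workspace also tracks allocation phases and a separate table area):

    #include <stddef.h>
    #include <stdint.h>

    typedef uint8_t BYTE;

    /* Front cursor grows up for objects, back cursor grows down for buffers. */
    typedef struct {
        void *workspace;      /* start of the arena            */
        void *workspaceEnd;   /* one past the end of the arena */
        void *objectEnd;      /* end of front allocations      */
        void *allocStart;     /* start of back allocations     */
    } cwksp_sketch;

    /* Reserve `bytes` from the back; NULL when front and back would cross. */
    static void *cwksp_reserve_back(cwksp_sketch *ws, size_t bytes)
    {
        void *const alloc = (BYTE *)ws->allocStart - bytes;

        if ((BYTE *)alloc < (BYTE *)ws->objectEnd)
            return NULL;                    /* arena exhausted */
        ws->allocStart = alloc;
        return alloc;
    }

    /* Total arena size, mirroring ZSTD_cwksp_sizeof() above. */
    static size_t cwksp_sizeof(const cwksp_sketch *ws)
    {
        return (size_t)((BYTE *)ws->workspaceEnd - (BYTE *)ws->workspace);
    }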
|
zstd_compress_sequences.h
  37 const BYTE* codeTable, size_t nbSeq,
  44 FSE_CTable const* CTable_MatchLength, BYTE const* mlCodeTable,
  45 FSE_CTable const* CTable_OffsetBits, BYTE const* ofCodeTable,
  46 FSE_CTable const* CTable_LitLength, BYTE const* llCodeTable,
|
fse_compress.c
  239 BYTE* const ostart = (BYTE*) header;  in FSE_writeNCount_generic()
  240 BYTE* out = ostart;  in FSE_writeNCount_generic()
  271 out[0] = (BYTE) bitStream;  in FSE_writeNCount_generic()
  272 out[1] = (BYTE)(bitStream>>8);  in FSE_writeNCount_generic()
  286 out[0] = (BYTE)bitStream;  in FSE_writeNCount_generic()
  308 out[0] = (BYTE)bitStream;  in FSE_writeNCount_generic()
  309 out[1] = (BYTE)(bitStream>>8);  in FSE_writeNCount_generic()
  322 out[0] = (BYTE)bitStream;  in FSE_writeNCount_generic()
  323 out[1] = (BYTE)(bitStream>>8);  in FSE_writeNCount_generic()
  556 const BYTE* const istart = (const BYTE*) src;  in FSE_compress_usingCTable_generic()
  [all …]
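
The repeated out[0]/out[1] pairs in FSE_writeNCount_generic() flush the low 16 bits of a bit accumulator to the output in little-endian order before the accumulator fills up. A sketch of that flush step (flush16_sketch is illustrative):

    #include <stdint.h>

    typedef uint8_t  BYTE;
    typedef uint32_t U32;

    /* Append the low 16 bits of the accumulator to the output, little-endian,
     * and drop them from the accumulator.  Returns the advanced output ptr. */
    static BYTE *flush16_sketch(BYTE *out, U32 *bitStream, int *bitCount)
    {
        out[0] = (BYTE)(*bitStream);
        out[1] = (BYTE)(*bitStream >> 8);
        *bitStream >>= 16;
        *bitCount  -= 16;
        return out + 2;
    }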
|
zstd_opt.c
  445 const BYTE* const ip, const BYTE* const iend,  in ZSTD_insertBt1()
  463 const BYTE* match;  in ZSTD_insertBt1()
  565 const BYTE* const ip, const BYTE* const iend,  in ZSTD_updateTree_internal()
  584 void ZSTD_updateTree(ZSTD_MatchState_t* ms, const BYTE* ip, const BYTE* iend) {  in ZSTD_updateTree()
  595 const BYTE* const ip, const BYTE* const iLimit,  in ZSTD_insertBtAndGetAllMatches()
  727 const BYTE* match;  in ZSTD_insertBtAndGetAllMatches()
  825 const BYTE*,
  826 const BYTE*,
  837 const BYTE* ip,  in ZSTD_btGetAllMatches_internal()
  1086 const BYTE* const istart = (const BYTE*)src;  in ZSTD_compressBlock_opt_generic()
  [all …]
|
/lib/zstd/decompress/
zstd_decompress_block.c
  142 { const BYTE* const istart = (const BYTE*) src;  in ZSTD_decodeLiteralsBlock()
  496 BYTE* spread = (BYTE*)(symbolNext + MaxSeq + 1);  in ZSTD_buildFSETable_body()
  699 const BYTE* const istart = (const BYTE*)src;  in ZSTD_decodeSeqHeaders()
  1369 BYTE const* op, BYTE const* oend,  in ZSTD_assertValidSequence()
  1409 const BYTE* ip = (const BYTE*)seqStart;  in ZSTD_decompressSequences_bodySplitLitBuffer()
  1411 BYTE* const ostart = (BYTE*)dst;  in ZSTD_decompressSequences_bodySplitLitBuffer()
  1621 const BYTE* ip = (const BYTE*)seqStart;  in ZSTD_decompressSequences_body()
  1623 BYTE* const ostart = (BYTE*)dst;  in ZSTD_decompressSequences_body()
  1740 const BYTE* ip = (const BYTE*)seqStart;  in ZSTD_decompressSequencesLong_body()
  1742 BYTE* const ostart = (BYTE*)dst;  in ZSTD_decompressSequencesLong_body()
  [all …]
|
huf_decompress.c
  138 typedef struct { BYTE maxTableLog; BYTE tableType; BYTE tableLog; BYTE reserved; } DTableDesc;
  577 BYTE* op = (BYTE*)dst;  in HUF_decompress1X1_usingDTable_internal_body()
  609 BYTE* const ostart = (BYTE*) dst;  in HUF_decompress4X1_usingDTable_internal_body()
  872 BYTE* segmentEnd = (BYTE*)dst;  in HUF_decompress4X1_usingDTable_internal_fast()
  931 const BYTE* ip = (const BYTE*) cSrc;  in HUF_decompress4X1_DCtx_wksp()
  1361 { BYTE* const ostart = (BYTE*) dst;  in HUF_decompress1X2_usingDTable_internal_body()
  1390 BYTE* const ostart = (BYTE*) dst;  in HUF_decompress4X2_usingDTable_internal_body()
  1697 BYTE* segmentEnd = (BYTE*)dst;  in HUF_decompress4X2_usingDTable_internal_fast()
  1755 const BYTE* ip = (const BYTE*) cSrc;  in HUF_DGEN()
  1770 const BYTE* ip = (const BYTE*) cSrc;  in HUF_decompress4X2_DCtx_wksp()
  [all …]
|
zstd_decompress_internal.h
  70 BYTE nbAdditionalBits;
  71 BYTE nbBits;
  152 const BYTE* litPtr;
  190 BYTE* litBuffer;
  191 const BYTE* litBufferEnd;
  193 …BYTE litExtraBuffer[ZSTD_LITBUFFEREXTRASIZE + WILDCOPY_OVERLENGTH]; /* literal buffer can be split…
  194 BYTE headerBuffer[ZSTD_FRAMEHEADERSIZE_MAX];
|
/lib/zstd/common/
entropy_common.c
  46 const BYTE* const istart = (const BYTE*) headerBuffer;  in FSE_readNCount_body()
  47 const BYTE* const iend = istart + hbSize;  in FSE_readNCount_body()
  48 const BYTE* ip = istart;  in FSE_readNCount_body()
  235 size_t HUF_readStats(BYTE* huffWeight, size_t hwSize, U32* rankStats,  in HUF_readStats()
  244 HUF_readStats_body(BYTE* huffWeight, size_t hwSize, U32* rankStats,  in HUF_readStats_body()
  251 const BYTE* ip = (const BYTE*) src;  in HUF_readStats_body()
  297 huffWeight[oSize] = (BYTE)lastWeight;  in HUF_readStats_body()
  310 static size_t HUF_readStats_body_default(BYTE* huffWeight, size_t hwSize, U32* rankStats,  in HUF_readStats_body_default()
  319 static BMI2_TARGET_ATTRIBUTE size_t HUF_readStats_body_bmi2(BYTE* huffWeight, size_t hwSize, U32* r…  in HUF_readStats_body_bmi2()
  328 size_t HUF_readStats_wksp(BYTE* huffWeight, size_t hwSize, U32* rankStats,  in HUF_readStats_wksp()
|
zstd_internal.h
  186 BYTE copy16_buf[16];  in ZSTD_copy16()
  212 ptrdiff_t diff = (BYTE*)dst - (const BYTE*)src;  in ZSTD_wildcopy()
  213 const BYTE* ip = (const BYTE*)src;  in ZSTD_wildcopy()
  214 BYTE* op = (BYTE*)dst;  in ZSTD_wildcopy()
  215 BYTE* const oend = op + length;  in ZSTD_wildcopy()
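
ZSTD_wildcopy() computes dst minus src because the copy strategy depends on overlap: with a small positive offset, 16-byte chunks would read bytes that have not been written yet, so the copy drops to 8-byte steps. A sketch of that branch, assuming callers guarantee an offset of at least 8 in the overlapping case, as zstd's sequence execution does (wildcopy_sketch and VECLEN_SKETCH are illustrative names):

    #include <stdint.h>
    #include <stddef.h>
    #include <string.h>

    typedef uint8_t BYTE;
    #define VECLEN_SKETCH 16        /* stand-in for WILDCOPY_VECLEN */

    /* Chunked copy that may write a little past oend into caller slack. */
    static void wildcopy_sketch(void *dst, const void *src, ptrdiff_t length)
    {
        ptrdiff_t const diff = (BYTE *)dst - (const BYTE *)src;
        const BYTE *ip = (const BYTE *)src;
        BYTE *op = (BYTE *)dst;
        BYTE *const oend = op + length;

        if (diff > 0 && diff < VECLEN_SKETCH) {
            /* overlapping copy; callers guarantee diff >= 8 here */
            do {
                memcpy(op, ip, 8); op += 8; ip += 8;
            } while (op < oend);
        } else {
            do {
                memcpy(op, ip, 16); op += 16; ip += 16;
            } while (op < oend);
        }
    }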
|
fse_decompress.c
  64 BYTE* spread = (BYTE*)(symbolNext + maxSymbolValue + 1);  in FSE_buildDTable_internal()
  155 tableDecode[u].nbBits = (BYTE) (tableLog - ZSTD_highbit32(nextState) );  in FSE_buildDTable_internal()
  179 BYTE* const ostart = (BYTE*) dst;  in FSE_decompress_usingDTable_generic()
  180 BYTE* op = ostart;  in FSE_decompress_usingDTable_generic()
  181 BYTE* const omax = op + maxDstSize;  in FSE_decompress_usingDTable_generic()
  182 BYTE* const olimit = omax-3;  in FSE_decompress_usingDTable_generic()
  250 const BYTE* const istart = (const BYTE*)cSrc;  in FSE_decompress_wksp_body()
  251 const BYTE* ip = istart;  in FSE_decompress_wksp_body()
  276 workSpace = (BYTE*)workSpace + sizeof(*wksp) + FSE_DTABLE_SIZE(tableLog);  in FSE_decompress_wksp_body()
|
bitstream.h
  249 { BYTE const lastByte = ((const BYTE*)srcBuffer)[srcSize-1];  in BIT_initDStream()
  254 bitD->bitContainer = *(const BYTE*)(bitD->start);  in BIT_initDStream()
  257 …case 7: bitD->bitContainer += (BitContainerType)(((const BYTE*)(srcBuffer))[6]) << (sizeof(bitD->b…  in BIT_initDStream()
  260 …case 6: bitD->bitContainer += (BitContainerType)(((const BYTE*)(srcBuffer))[5]) << (sizeof(bitD->b…  in BIT_initDStream()
  263 …case 5: bitD->bitContainer += (BitContainerType)(((const BYTE*)(srcBuffer))[4]) << (sizeof(bitD->b…  in BIT_initDStream()
  266 case 4: bitD->bitContainer += (BitContainerType)(((const BYTE*)(srcBuffer))[3]) << 24;  in BIT_initDStream()
  269 case 3: bitD->bitContainer += (BitContainerType)(((const BYTE*)(srcBuffer))[2]) << 16;  in BIT_initDStream()
  272 case 2: bitD->bitContainer += (BitContainerType)(((const BYTE*)(srcBuffer))[1]) << 8;  in BIT_initDStream()
  277 { BYTE const lastByte = ((const BYTE*)srcBuffer)[srcSize-1];  in BIT_initDStream()
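
The fallthrough switch in BIT_initDStream() loads a short input (srcSize no larger than the bit container) byte by byte, with byte i landing at bit position 8*i. The same effect written as a loop, as a sketch (fillContainer_sketch is illustrative and assumes a 64-bit container):

    #include <stdint.h>
    #include <stddef.h>

    typedef uint8_t BYTE;

    /* Build the bit container from a buffer of at most sizeof(uint64_t) bytes;
     * byte i contributes bits [8*i, 8*i+7] of the container. */
    static uint64_t fillContainer_sketch(const void *srcBuffer, size_t srcSize)
    {
        const BYTE *const src = (const BYTE *)srcBuffer;
        uint64_t container = 0;
        size_t i;

        for (i = 0; i < srcSize; i++)
            container += (uint64_t)src[i] << (8 * i);
        return container;
    }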