Lines matching refs: ws (from zstd's lib/compress/zstd_cwksp.h, the compression workspace allocator)
176 MEM_STATIC size_t ZSTD_cwksp_available_space(ZSTD_cwksp* ws);
177 MEM_STATIC void* ZSTD_cwksp_initialAllocStart(ZSTD_cwksp* ws);

179 MEM_STATIC void ZSTD_cwksp_assert_internal_consistency(ZSTD_cwksp* ws) {
180     (void)ws;
181     assert(ws->workspace <= ws->objectEnd);
182     assert(ws->objectEnd <= ws->tableEnd);
183     assert(ws->objectEnd <= ws->tableValidEnd);
184     assert(ws->tableEnd <= ws->allocStart);
185     assert(ws->tableValidEnd <= ws->allocStart);
186     assert(ws->allocStart <= ws->workspaceEnd);
187     assert(ws->initOnceStart <= ZSTD_cwksp_initialAllocStart(ws));
188     assert(ws->workspace <= ws->initOnceStart);
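
Taken together, these asserts describe the cwksp memory map: objects sit at the bottom of the arena, tables above them, and buffer/aligned allocations grow down from the top toward the tables. A minimal field sketch inferred from the asserts and assignments in this listing (illustrative only; the authoritative ZSTD_cwksp declaration is in zstd_cwksp.h, and the exact types of the last four fields are guesses):

    typedef struct {
        void* workspace;       /* arena base */
        void* workspaceEnd;    /* one past the arena end */
        void* objectEnd;       /* objects occupy [workspace, objectEnd) */
        void* tableEnd;        /* tables occupy [objectEnd, tableEnd) */
        void* tableValidEnd;   /* table bytes in [objectEnd, tableValidEnd) are known-zero */
        void* allocStart;      /* buffers/aligned allocs occupy [allocStart, workspaceEnd) */
        void* initOnceStart;   /* low bound of the zero-once region at the top */
        int   allocFailed;                 /* sticky failure flag */
        int   phase;                       /* ZSTD_cwksp_alloc_phase_e in the real header */
        int   isStatic;                    /* ZSTD_cwksp_static_alloc_e in the real header */
        int   workspaceOversizedDuration;  /* see the check_wasteful() logic below */
    } ZSTD_cwksp_sketch;
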
259 MEM_STATIC void* ZSTD_cwksp_initialAllocStart(ZSTD_cwksp* ws)
261     char* endPtr = (char*)ws->workspaceEnd;

275 ZSTD_cwksp_reserve_internal_buffer_space(ZSTD_cwksp* ws, size_t const bytes)
277     void* const alloc = (BYTE*)ws->allocStart - bytes;
278     void* const bottom = ws->tableEnd;
280         alloc, bytes, ZSTD_cwksp_available_space(ws) - bytes);
281     ZSTD_cwksp_assert_internal_consistency(ws);
285         ws->allocFailed = 1;
290     if (alloc < ws->tableValidEnd) {
291         ws->tableValidEnd = alloc;
293     ws->allocStart = alloc;
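
A reservation takes `bytes` off the top of the free region and fails if it would cross into the table area. A self-contained stand-in for the same arithmetic (plain pointers, no zstd types; not the zstd implementation itself):

    #include <stddef.h>

    /* The free gap is [tableEnd, allocStart); a buffer reservation moves
     * allocStart down, failing (NULL) once the request would overlap tables. */
    static void* reserve_top_down(char** allocStart, char* tableEnd, size_t bytes)
    {
        if (bytes > (size_t)(*allocStart - tableEnd)) return NULL;
        *allocStart -= bytes;
        return *allocStart;
    }
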
303 ZSTD_cwksp_internal_advance_phase(ZSTD_cwksp* ws, ZSTD_cwksp_alloc_phase_e phase)
305     assert(phase >= ws->phase);
306     if (phase > ws->phase) {
308         if (ws->phase < ZSTD_cwksp_alloc_aligned_init_once &&
310             ws->tableValidEnd = ws->objectEnd;
311             ws->initOnceStart = ZSTD_cwksp_initialAllocStart(ws);
314             void *const alloc = ws->objectEnd;
318             RETURN_ERROR_IF(objectEnd > ws->workspaceEnd, memory_allocation,
320             ws->objectEnd = objectEnd;
321             ws->tableEnd = objectEnd;   /* table area starts being empty */
322             if (ws->tableValidEnd < ws->tableEnd) {
323                 ws->tableValidEnd = ws->tableEnd;
327         ws->phase = phase;
328     ZSTD_cwksp_assert_internal_consistency(ws);
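
The assert on line 305 is the key contract: phases only move forward, so allocations must be grouped by phase. A hedged illustration of that ordering, assuming an initialized workspace and the in-tree header:

    /* Sketch: objects first, then buffers, then tables/aligned allocations.
     * ZSTD_cwksp_internal_advance_phase() never goes backward. */
    static void demo_phase_order(ZSTD_cwksp* ws)
    {
        void* obj = ZSTD_cwksp_reserve_object(ws, 64);    /* objects phase */
        BYTE* buf = ZSTD_cwksp_reserve_buffer(ws, 1024);  /* advances to buffers */
        void* tbl = ZSTD_cwksp_reserve_table(ws, 4096);   /* advances to the aligned phase */
        /* A second ZSTD_cwksp_reserve_object() here would set ws->allocFailed,
         * since ws->phase is no longer ZSTD_cwksp_alloc_objects (line 465). */
        (void)obj; (void)buf; (void)tbl;
    }
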
336 MEM_STATIC int ZSTD_cwksp_owns_buffer(const ZSTD_cwksp* ws, const void* ptr)
338     return (ptr != NULL) && (ws->workspace <= ptr) && (ptr < ws->workspaceEnd);

345 ZSTD_cwksp_reserve_internal(ZSTD_cwksp* ws, size_t bytes, ZSTD_cwksp_alloc_phase_e phase)
348     if (ZSTD_isError(ZSTD_cwksp_internal_advance_phase(ws, phase)) || bytes == 0) {
353     alloc = ZSTD_cwksp_reserve_internal_buffer_space(ws, bytes);

362 MEM_STATIC BYTE* ZSTD_cwksp_reserve_buffer(ZSTD_cwksp* ws, size_t bytes)
364     return (BYTE*)ZSTD_cwksp_reserve_internal(ws, bytes, ZSTD_cwksp_alloc_buffers);
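
Because allocation failure is recorded in the sticky ws->allocFailed flag rather than reported per call, callers can reserve several buffers in a row and test once at the end. A sketch of that idiom (demo function name is hypothetical):

    /* Sloppy-allocation idiom: reserve everything, check the flag once
     * via ZSTD_cwksp_reserve_failed() (line 610). */
    static int demo_reserve_buffers(ZSTD_cwksp* ws, size_t litSize, size_t seqSize)
    {
        BYTE* const lit = ZSTD_cwksp_reserve_buffer(ws, litSize);
        BYTE* const seq = ZSTD_cwksp_reserve_buffer(ws, seqSize);
        (void)lit; (void)seq;
        return ZSTD_cwksp_reserve_failed(ws) ? -1 : 0;  /* -1: workspace too small */
    }
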
376 MEM_STATIC void* ZSTD_cwksp_reserve_aligned_init_once(ZSTD_cwksp* ws, size_t bytes)
379     void* ptr = ZSTD_cwksp_reserve_internal(ws, alignedBytes, ZSTD_cwksp_alloc_aligned_init_once);
381     if(ptr && ptr < ws->initOnceStart) {
388         ZSTD_memset(ptr, 0, MIN((size_t)((U8*)ws->initOnceStart - (U8*)ptr), alignedBytes));
389         ws->initOnceStart = ptr;
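
The memset on line 388 covers only the bytes not already below initOnceStart, so memory handed out from this region is zeroed exactly once and is not re-zeroed when the workspace is cleared and the same reservation is made again. A minimal usage sketch under that reading:

    /* Sketch: a zero-once table. Guaranteed zeroed on first hand-out;
     * contents then persist across ZSTD_cwksp_clear() provided the same
     * reservations are replayed in the same order. */
    static U32* demo_init_once_table(ZSTD_cwksp* ws, size_t nbEntries)
    {
        return (U32*)ZSTD_cwksp_reserve_aligned_init_once(ws, nbEntries * sizeof(U32));
    }
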
397 MEM_STATIC void* ZSTD_cwksp_reserve_aligned64(ZSTD_cwksp* ws, size_t bytes)
399     void* const ptr = ZSTD_cwksp_reserve_internal(ws,

411 MEM_STATIC void* ZSTD_cwksp_reserve_table(ZSTD_cwksp* ws, size_t bytes)
420     if(ws->phase < phase) {
421         if (ZSTD_isError(ZSTD_cwksp_internal_advance_phase(ws, phase))) {
425     alloc = ws->tableEnd;
427     top = ws->allocStart;
430         alloc, bytes, ZSTD_cwksp_available_space(ws) - bytes);
432     ZSTD_cwksp_assert_internal_consistency(ws);
436         ws->allocFailed = 1;
439     ws->tableEnd = end;
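
Unlike buffers, tables grow bottom-up from ws->tableEnd (line 425) toward ws->allocStart (line 427), so the two kinds of reservation consume the same free gap from opposite ends. A usage sketch (function name and hashLog parameter are illustrative):

    /* Sketch: reserving a power-of-two hash table from the table area. */
    static U32* demo_reserve_hash_table(ZSTD_cwksp* ws, unsigned hashLog)
    {
        size_t const tableBytes = ((size_t)1 << hashLog) * sizeof(U32);
        return (U32*)ZSTD_cwksp_reserve_table(ws, tableBytes);
    }
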
451 MEM_STATIC void* ZSTD_cwksp_reserve_object(ZSTD_cwksp* ws, size_t bytes)
454     void* alloc = ws->objectEnd;
460         alloc, bytes, roundedBytes, ZSTD_cwksp_available_space(ws) - roundedBytes);
463     ZSTD_cwksp_assert_internal_consistency(ws);
465     if (ws->phase != ZSTD_cwksp_alloc_objects || end > ws->workspaceEnd) {
467         ws->allocFailed = 1;
470     ws->objectEnd = end;
471     ws->tableEnd = end;
472     ws->tableValidEnd = end;

481 MEM_STATIC void* ZSTD_cwksp_reserve_object_aligned(ZSTD_cwksp* ws, size_t byteSize, size_t alignment)
485     void* const start = ZSTD_cwksp_reserve_object(ws, byteSize + surplus);
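
Line 485 over-reserves by a `surplus` (computed on an elided line) so the result can be rounded up to the requested alignment. A stand-in for that rounding step; the exact surplus formula in zstd_cwksp.h may differ, but the technique is the standard one for power-of-two alignments:

    #include <stdint.h>

    /* Round a pointer up to a power-of-two alignment. Pairs with
     * over-reserving by at least (alignment - 1) bytes. */
    static void* align_up(void* p, size_t alignment)
    {
        uintptr_t const mask = (uintptr_t)alignment - 1;
        return (void*)(((uintptr_t)p + mask) & ~mask);
    }
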
492 MEM_STATIC void ZSTD_cwksp_mark_tables_dirty(ZSTD_cwksp* ws)
497     assert(ws->tableValidEnd >= ws->objectEnd);
498     assert(ws->tableValidEnd <= ws->allocStart);
499     ws->tableValidEnd = ws->objectEnd;
500     ZSTD_cwksp_assert_internal_consistency(ws);

503 MEM_STATIC void ZSTD_cwksp_mark_tables_clean(ZSTD_cwksp* ws) {
505     assert(ws->tableValidEnd >= ws->objectEnd);
506     assert(ws->tableValidEnd <= ws->allocStart);
507     if (ws->tableValidEnd < ws->tableEnd) {
508         ws->tableValidEnd = ws->tableEnd;
510     ZSTD_cwksp_assert_internal_consistency(ws);

516 MEM_STATIC void ZSTD_cwksp_clean_tables(ZSTD_cwksp* ws) {
518     assert(ws->tableValidEnd >= ws->objectEnd);
519     assert(ws->tableValidEnd <= ws->allocStart);
520     if (ws->tableValidEnd < ws->tableEnd) {
521         ZSTD_memset(ws->tableValidEnd, 0, (size_t)((BYTE*)ws->tableEnd - (BYTE*)ws->tableValidEnd));
523     ZSTD_cwksp_mark_tables_clean(ws);
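
tableValidEnd tracks how much of the table area is known to be zero, which lets the workspace skip redundant zeroing. A sketch of the dirty/clean protocol as these three functions expose it:

    /* Marking dirty is O(1): tableValidEnd just falls back to objectEnd
     * (line 499). The eventual clean zeroes only the not-known-zero range
     * [tableValidEnd, tableEnd) (line 521). */
    static void demo_table_hygiene(ZSTD_cwksp* ws)
    {
        ZSTD_cwksp_mark_tables_dirty(ws);  /* stop trusting table contents */
        /* ... table memory may be reused or scribbled over here ... */
        ZSTD_cwksp_clean_tables(ws);       /* re-zero lazily, only what's needed */
    }
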
530 MEM_STATIC void ZSTD_cwksp_clear_tables(ZSTD_cwksp* ws)
535     ws->tableEnd = ws->objectEnd;
536     ZSTD_cwksp_assert_internal_consistency(ws);

543 MEM_STATIC void ZSTD_cwksp_clear(ZSTD_cwksp* ws) {
548     ws->tableEnd = ws->objectEnd;
549     ws->allocStart = ZSTD_cwksp_initialAllocStart(ws);
550     ws->allocFailed = 0;
551     if (ws->phase > ZSTD_cwksp_alloc_aligned_init_once) {
552         ws->phase = ZSTD_cwksp_alloc_aligned_init_once;
554     ZSTD_cwksp_assert_internal_consistency(ws);
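
ZSTD_cwksp_clear() rewinds the table area and the top-of-workspace allocations and resets the failure flag, but leaves objectEnd alone, so objects reserved in the objects phase survive. A sketch of the recycling pattern this enables:

    /* Sketch: per-operation reuse of one workspace. After clear(), buffers,
     * tables and aligned allocations can be reserved afresh; objects (and,
     * per the init-once design above, that region's contents) persist. */
    static void demo_recycle(ZSTD_cwksp* ws)
    {
        ZSTD_cwksp_clear(ws);
        /* re-reserve per-operation buffers and tables here */
    }
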
557 MEM_STATIC size_t ZSTD_cwksp_sizeof(const ZSTD_cwksp* ws) {
558     return (size_t)((BYTE*)ws->workspaceEnd - (BYTE*)ws->workspace);

561 MEM_STATIC size_t ZSTD_cwksp_used(const ZSTD_cwksp* ws) {
562     return (size_t)((BYTE*)ws->tableEnd - (BYTE*)ws->workspace)
563          + (size_t)((BYTE*)ws->workspaceEnd - (BYTE*)ws->allocStart);

571 MEM_STATIC void ZSTD_cwksp_init(ZSTD_cwksp* ws, void* start, size_t size, ZSTD_cwksp_static_alloc_e isStatic)
574     ws->workspace = start;
575     ws->workspaceEnd = (BYTE*)start + size;
576     ws->objectEnd = ws->workspace;
577     ws->tableValidEnd = ws->objectEnd;
578     ws->initOnceStart = ZSTD_cwksp_initialAllocStart(ws);
579     ws->phase = ZSTD_cwksp_alloc_objects;
580     ws->isStatic = isStatic;
581     ZSTD_cwksp_clear(ws);
582     ws->workspaceOversizedDuration = 0;
583     ZSTD_cwksp_assert_internal_consistency(ws);

586 MEM_STATIC size_t ZSTD_cwksp_create(ZSTD_cwksp* ws, size_t size, ZSTD_customMem customMem) {
590     ZSTD_cwksp_init(ws, workspace, size, ZSTD_cwksp_dynamic_alloc);

594 MEM_STATIC void ZSTD_cwksp_free(ZSTD_cwksp* ws, ZSTD_customMem customMem) {
595     void *ptr = ws->workspace;
597     ZSTD_memset(ws, 0, sizeof(ZSTD_cwksp));
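
A workspace is obtained either by wrapping a caller-provided buffer with ZSTD_cwksp_init() (the static-alloc path) or by letting ZSTD_cwksp_create() allocate one through ZSTD_customMem. A minimal sketch of the dynamic lifecycle, assuming the in-tree internal header; the 1 MB size is arbitrary, and an all-NULL ZSTD_customMem selects zstd's default allocator:

    #include "zstd_cwksp.h"  /* internal header; not part of the public API */

    static int demo_lifecycle(void)
    {
        ZSTD_cwksp ws;
        ZSTD_customMem const cmem = { NULL, NULL, NULL };
        if (ZSTD_isError(ZSTD_cwksp_create(&ws, (size_t)1 << 20, cmem)))
            return -1;
        {   void* const obj = ZSTD_cwksp_reserve_object(&ws, 64);
            BYTE* const buf = ZSTD_cwksp_reserve_buffer(&ws, 4096);
            (void)obj; (void)buf;
        }
        if (ZSTD_cwksp_reserve_failed(&ws)) { ZSTD_cwksp_free(&ws, cmem); return -1; }
        ZSTD_cwksp_free(&ws, cmem);
        return 0;
    }

Note how ZSTD_cwksp_used() above counts both ends of the arena: everything below tableEnd plus everything at or above allocStart, i.e. exactly the complement of the free gap measured by ZSTD_cwksp_available_space() (line 631).
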
610 MEM_STATIC int ZSTD_cwksp_reserve_failed(const ZSTD_cwksp* ws) {
611     return ws->allocFailed;

622 MEM_STATIC int ZSTD_cwksp_estimated_space_within_bounds(const ZSTD_cwksp *const ws, size_t const estimatedSpace)
625     return (estimatedSpace - ZSTD_cwksp_slack_space_required()) <= ZSTD_cwksp_used(ws) &&
626            ZSTD_cwksp_used(ws) <= estimatedSpace;

630 MEM_STATIC size_t ZSTD_cwksp_available_space(ZSTD_cwksp* ws) {
631     return (size_t)((BYTE*)ws->allocStart - (BYTE*)ws->tableEnd);

634 MEM_STATIC int ZSTD_cwksp_check_available(ZSTD_cwksp* ws, size_t additionalNeededSpace) {
635     return ZSTD_cwksp_available_space(ws) >= additionalNeededSpace;

638 MEM_STATIC int ZSTD_cwksp_check_too_large(ZSTD_cwksp* ws, size_t additionalNeededSpace) {
640         ws, additionalNeededSpace * ZSTD_WORKSPACETOOLARGE_FACTOR);

643 MEM_STATIC int ZSTD_cwksp_check_wasteful(ZSTD_cwksp* ws, size_t additionalNeededSpace) {
644     return ZSTD_cwksp_check_too_large(ws, additionalNeededSpace)
645         && ws->workspaceOversizedDuration > ZSTD_WORKSPACETOOLARGE_MAXDURATION;

649         ZSTD_cwksp* ws, size_t additionalNeededSpace) {
650     if (ZSTD_cwksp_check_too_large(ws, additionalNeededSpace)) {
651         ws->workspaceOversizedDuration++;
653         ws->workspaceOversizedDuration = 0;
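
These last three helpers implement a hysteresis for shrinking: the workspace must be ZSTD_WORKSPACETOOLARGE_FACTOR times larger than needed for more than ZSTD_WORKSPACETOOLARGE_MAXDURATION consecutive uses before check_wasteful() fires, which avoids realloc thrash on alternating large and small jobs. A hedged sketch of a resize decision built from them (my own composition, not a function from the header):

    /* Grow when the free gap is too small; shrink only once the surplus
     * has persisted long enough to be deemed wasteful. */
    static int demo_needs_new_workspace(ZSTD_cwksp* ws, size_t neededSpace)
    {
        ZSTD_cwksp_bump_oversized_duration(ws, neededSpace);
        return !ZSTD_cwksp_check_available(ws, neededSpace)  /* too small: grow   */
            ||  ZSTD_cwksp_check_wasteful(ws, neededSpace);  /* too big too long: shrink */
    }
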