Lines matching refs: ws (cross-reference listing; the matched lines below are from lib/compress/zstd_cwksp.h in the zstd sources)

161 MEM_STATIC size_t ZSTD_cwksp_available_space(ZSTD_cwksp* ws);

163 MEM_STATIC void ZSTD_cwksp_assert_internal_consistency(ZSTD_cwksp* ws) {
164     (void)ws;
165     assert(ws->workspace <= ws->objectEnd);
166     assert(ws->objectEnd <= ws->tableEnd);
167     assert(ws->objectEnd <= ws->tableValidEnd);
168     assert(ws->tableEnd <= ws->allocStart);
169     assert(ws->tableValidEnd <= ws->allocStart);
170     assert(ws->allocStart <= ws->workspaceEnd);

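Taken together, these asserts pin down the cwksp memory map: objects sit at the bottom of the workspace, tables stack above them, and buffers/aligned allocations grow down from the top, with the free region in the middle. A sketch of the layout implied by the invariants (the header itself documents this in more detail):

    /* low addresses                                            high addresses
     *  [ objects )[ tables ... )[      free      )( ... buffers / aligned ]
     *  ^          ^             ^                 ^                       ^
     *  workspace  objectEnd     tableEnd          allocStart     workspaceEnd
     *
     * tableValidEnd lies in [objectEnd, allocStart] and tracks how much of
     * the table area is currently known to hold valid (zeroed) contents.
     */
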
245 ZSTD_cwksp_reserve_internal_buffer_space(ZSTD_cwksp* ws, size_t const bytes)
247     void* const alloc = (BYTE*)ws->allocStart - bytes;
248     void* const bottom = ws->tableEnd;
250         alloc, bytes, ZSTD_cwksp_available_space(ws) - bytes);
251     ZSTD_cwksp_assert_internal_consistency(ws);
255         ws->allocFailed = 1;
260     if (alloc < ws->tableValidEnd) {
261         ws->tableValidEnd = alloc;
263     ws->allocStart = alloc;

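Buffers are carved from the high end of the workspace by moving allocStart downward until it would collide with tableEnd. A minimal stand-alone sketch of the same bump-down allocation scheme (hypothetical bump_down_* names, not the zstd API):

    #include <stddef.h>

    typedef struct { char* bottom; char* top; } bump_down_arena;  /* hypothetical */

    /* Move `top` down by `bytes`; fail (NULL) rather than cross `bottom`,
     * mirroring the collision check against tableEnd above. */
    static void* bump_down_reserve(bump_down_arena* a, size_t bytes) {
        if ((size_t)(a->top - a->bottom) < bytes) return NULL;
        a->top -= bytes;
        return a->top;
    }
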
273 ZSTD_cwksp_internal_advance_phase(ZSTD_cwksp* ws, ZSTD_cwksp_alloc_phase_e phase)
275     assert(phase >= ws->phase);
276     if (phase > ws->phase) {
278         if (ws->phase < ZSTD_cwksp_alloc_buffers &&
280             ws->tableValidEnd = ws->objectEnd;
284         if (ws->phase < ZSTD_cwksp_alloc_aligned &&
288             size_t const bytesToAlign = ZSTD_CWKSP_ALIGNMENT_BYTES - ZSTD_cwksp_bytes_to_align_ptr(ws->allocStart, ZSTD_CWKSP_ALIGNMENT_BYTES);
291             RETURN_ERROR_IF(!ZSTD_cwksp_reserve_internal_buffer_space(ws, bytesToAlign),
295             void* const alloc = ws->objectEnd;
299             RETURN_ERROR_IF(objectEnd > ws->workspaceEnd, memory_allocation,
301             ws->objectEnd = objectEnd;
302             ws->tableEnd = objectEnd;   /* table area starts being empty */
303             if (ws->tableValidEnd < ws->tableEnd) {
304                 ws->tableValidEnd = ws->tableEnd;
306         ws->phase = phase;
307     ZSTD_cwksp_assert_internal_consistency(ws);

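The phase argument only moves forward (note the assert(phase >= ws->phase) above): once the workspace has begun handing out buffers it can no longer place objects, and so on. In the zstd version this listing appears to come from, the phases are an ordered enum along these lines:

    typedef enum {
        ZSTD_cwksp_alloc_objects,   /* long-lived objects, allocated first  */
        ZSTD_cwksp_alloc_buffers,   /* byte buffers, from the top downward  */
        ZSTD_cwksp_alloc_aligned    /* aligned allocations and tables, last */
    } ZSTD_cwksp_alloc_phase_e;
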
315 MEM_STATIC int ZSTD_cwksp_owns_buffer(const ZSTD_cwksp* ws, const void* ptr)
317     return (ptr != NULL) && (ws->workspace <= ptr) && (ptr <= ws->workspaceEnd);

324 ZSTD_cwksp_reserve_internal(ZSTD_cwksp* ws, size_t bytes, ZSTD_cwksp_alloc_phase_e phase)
327     if (ZSTD_isError(ZSTD_cwksp_internal_advance_phase(ws, phase)) || bytes == 0) {
332     alloc = ZSTD_cwksp_reserve_internal_buffer_space(ws, bytes);

341 MEM_STATIC BYTE* ZSTD_cwksp_reserve_buffer(ZSTD_cwksp* ws, size_t bytes)
343     return (BYTE*)ZSTD_cwksp_reserve_internal(ws, bytes, ZSTD_cwksp_alloc_buffers);

349 MEM_STATIC void* ZSTD_cwksp_reserve_aligned(ZSTD_cwksp* ws, size_t bytes)
351     void* ptr = ZSTD_cwksp_reserve_internal(ws, ZSTD_cwksp_align(bytes, ZSTD_CWKSP_ALIGNMENT_BYTES),

362 MEM_STATIC void* ZSTD_cwksp_reserve_table(ZSTD_cwksp* ws, size_t bytes)
369     if (ZSTD_isError(ZSTD_cwksp_internal_advance_phase(ws, phase))) {
372     alloc = ws->tableEnd;
374     top = ws->allocStart;
377         alloc, bytes, ZSTD_cwksp_available_space(ws) - bytes);
379     ZSTD_cwksp_assert_internal_consistency(ws);
383         ws->allocFailed = 1;
386     ws->tableEnd = end;

398 MEM_STATIC void* ZSTD_cwksp_reserve_object(ZSTD_cwksp* ws, size_t bytes)
401     void* alloc = ws->objectEnd;
407         alloc, bytes, roundedBytes, ZSTD_cwksp_available_space(ws) - roundedBytes);
410     ZSTD_cwksp_assert_internal_consistency(ws);
412     if (ws->phase != ZSTD_cwksp_alloc_objects || end > ws->workspaceEnd) {
414         ws->allocFailed = 1;
417     ws->objectEnd = end;
418     ws->tableEnd = end;
419     ws->tableValidEnd = end;

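Because the phase only advances, callers must group their reservations in phase order: objects first, then buffers, then aligned allocations and tables. A hedged usage sketch (illustrative sizes; example_layout is hypothetical, and error handling is condensed into the single final check):

    static void example_layout(ZSTD_cwksp* ws) {                    /* hypothetical */
        void* obj    = ZSTD_cwksp_reserve_object(ws, 64);           /* objects phase */
        BYTE* lit    = ZSTD_cwksp_reserve_buffer(ws, 1 << 17);      /* buffers phase */
        void* opt    = ZSTD_cwksp_reserve_aligned(ws, 1 << 10);     /* aligned phase */
        U32*  hashes = (U32*)ZSTD_cwksp_reserve_table(ws, (1u << 12) * sizeof(U32));
        (void)obj; (void)lit; (void)opt; (void)hashes;
        assert(!ZSTD_cwksp_reserve_failed(ws));   /* one failure check at the end */
    }
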
425 MEM_STATIC void ZSTD_cwksp_mark_tables_dirty(ZSTD_cwksp* ws)
430     assert(ws->tableValidEnd >= ws->objectEnd);
431     assert(ws->tableValidEnd <= ws->allocStart);
432     ws->tableValidEnd = ws->objectEnd;
433     ZSTD_cwksp_assert_internal_consistency(ws);

436 MEM_STATIC void ZSTD_cwksp_mark_tables_clean(ZSTD_cwksp* ws) {
438     assert(ws->tableValidEnd >= ws->objectEnd);
439     assert(ws->tableValidEnd <= ws->allocStart);
440     if (ws->tableValidEnd < ws->tableEnd) {
441         ws->tableValidEnd = ws->tableEnd;
443     ZSTD_cwksp_assert_internal_consistency(ws);

449 MEM_STATIC void ZSTD_cwksp_clean_tables(ZSTD_cwksp* ws) {
451     assert(ws->tableValidEnd >= ws->objectEnd);
452     assert(ws->tableValidEnd <= ws->allocStart);
453     if (ws->tableValidEnd < ws->tableEnd) {
454         ZSTD_memset(ws->tableValidEnd, 0, (BYTE*)ws->tableEnd - (BYTE*)ws->tableValidEnd);
456     ZSTD_cwksp_mark_tables_clean(ws);

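The dirty/clean pair exists so tables never need a full re-zero: mark_tables_dirty withdraws the validity guarantee for the whole table area (tableValidEnd falls back to objectEnd), and clean_tables later zeroes only the [tableValidEnd, tableEnd) gap before restoring it. Roughly, the intended protocol looks like this (sketch, not verbatim zstd call sites):

    ZSTD_cwksp_mark_tables_dirty(ws);  /* table contents may have been clobbered */
    /* ... workspace memory reused in a way that can overwrite tables ... */
    ZSTD_cwksp_clean_tables(ws);       /* re-zero only the untrusted span */
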
463 MEM_STATIC void ZSTD_cwksp_clear_tables(ZSTD_cwksp* ws) {
467     ws->tableEnd = ws->objectEnd;
468     ZSTD_cwksp_assert_internal_consistency(ws);

475 MEM_STATIC void ZSTD_cwksp_clear(ZSTD_cwksp* ws) {
480     ws->tableEnd = ws->objectEnd;
481     ws->allocStart = ws->workspaceEnd;
482     ws->allocFailed = 0;
483     if (ws->phase > ZSTD_cwksp_alloc_buffers) {
484         ws->phase = ZSTD_cwksp_alloc_buffers;
486     ZSTD_cwksp_assert_internal_consistency(ws);

494 MEM_STATIC void ZSTD_cwksp_init(ZSTD_cwksp* ws, void* start, size_t size, ZSTD_cwksp_static_alloc_e isStatic) {
497     ws->workspace = start;
498     ws->workspaceEnd = (BYTE*)start + size;
499     ws->objectEnd = ws->workspace;
500     ws->tableValidEnd = ws->objectEnd;
501     ws->phase = ZSTD_cwksp_alloc_objects;
502     ws->isStatic = isStatic;
503     ZSTD_cwksp_clear(ws);
504     ws->workspaceOversizedDuration = 0;
505     ZSTD_cwksp_assert_internal_consistency(ws);

508 MEM_STATIC size_t ZSTD_cwksp_create(ZSTD_cwksp* ws, size_t size, ZSTD_customMem customMem) {
512     ZSTD_cwksp_init(ws, workspace, size, ZSTD_cwksp_dynamic_alloc);

516 MEM_STATIC void ZSTD_cwksp_free(ZSTD_cwksp* ws, ZSTD_customMem customMem) {
517     void *ptr = ws->workspace;
519     ZSTD_memset(ws, 0, sizeof(ZSTD_cwksp));

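Putting create/clear/free together, a dynamically allocated workspace lives roughly as below (sketch; example_lifecycle is hypothetical, with ZSTD_defaultCMem and FORWARD_IF_ERROR as used elsewhere in the zstd sources):

    static size_t example_lifecycle(size_t workspaceSize) {   /* hypothetical */
        ZSTD_cwksp ws;
        FORWARD_IF_ERROR(ZSTD_cwksp_create(&ws, workspaceSize, ZSTD_defaultCMem), "create failed");
        /* ... reserve objects/buffers/tables and run a compression job ... */
        ZSTD_cwksp_clear(&ws);          /* reuse: keeps objects, resets buffers/tables */
        /* ... reserve again for the next job ... */
        ZSTD_cwksp_free(&ws, ZSTD_defaultCMem);   /* releases the backing allocation */
        return 0;
    }
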
532 MEM_STATIC size_t ZSTD_cwksp_sizeof(const ZSTD_cwksp* ws) {
533     return (size_t)((BYTE*)ws->workspaceEnd - (BYTE*)ws->workspace);

536 MEM_STATIC size_t ZSTD_cwksp_used(const ZSTD_cwksp* ws) {
537     return (size_t)((BYTE*)ws->tableEnd - (BYTE*)ws->workspace)
538          + (size_t)((BYTE*)ws->workspaceEnd - (BYTE*)ws->allocStart);

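Note that used and available (defined further down) are complementary: used counts the two occupied ends, available the free middle, so for a consistent workspace used + available == sizeof. A worked example with illustrative numbers:

    /* size = 1024, tableEnd - workspace = 256, workspaceEnd - allocStart = 128:
     *   used      = 256 + 128             = 384
     *   available = allocStart - tableEnd = (1024 - 128) - 256 = 640
     *   used + available = 384 + 640      = 1024 = ZSTD_cwksp_sizeof(ws)
     */
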
541 MEM_STATIC int ZSTD_cwksp_reserve_failed(const ZSTD_cwksp* ws) {
542     return ws->allocFailed;

553 MEM_STATIC int ZSTD_cwksp_estimated_space_within_bounds(const ZSTD_cwksp* const ws,
557         return ZSTD_cwksp_used(ws) == estimatedSpace;
562         return (ZSTD_cwksp_used(ws) >= estimatedSpace - 63) && (ZSTD_cwksp_used(ws) <= estimatedSpace + 63);

567 MEM_STATIC size_t ZSTD_cwksp_available_space(ZSTD_cwksp* ws) {
568     return (size_t)((BYTE*)ws->allocStart - (BYTE*)ws->tableEnd);

571 MEM_STATIC int ZSTD_cwksp_check_available(ZSTD_cwksp* ws, size_t additionalNeededSpace) {
572     return ZSTD_cwksp_available_space(ws) >= additionalNeededSpace;

575 MEM_STATIC int ZSTD_cwksp_check_too_large(ZSTD_cwksp* ws, size_t additionalNeededSpace) {
577         ws, additionalNeededSpace * ZSTD_WORKSPACETOOLARGE_FACTOR);

580 MEM_STATIC int ZSTD_cwksp_check_wasteful(ZSTD_cwksp* ws, size_t additionalNeededSpace) {
581     return ZSTD_cwksp_check_too_large(ws, additionalNeededSpace)
582         && ws->workspaceOversizedDuration > ZSTD_WORKSPACETOOLARGE_MAXDURATION;

585 MEM_STATIC void ZSTD_cwksp_bump_oversized_duration(
586         ZSTD_cwksp* ws, size_t additionalNeededSpace) {
587     if (ZSTD_cwksp_check_too_large(ws, additionalNeededSpace)) {
588         ws->workspaceOversizedDuration++;
590         ws->workspaceOversizedDuration = 0;

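check_wasteful only fires once the workspace has stayed oversized (more than ZSTD_WORKSPACETOOLARGE_FACTOR times what is actually needed) for more than ZSTD_WORKSPACETOOLARGE_MAXDURATION consecutive uses, with bump_oversized_duration as the per-use counter update. A hedged sketch of how a caller might combine these checks when deciding whether to reallocate (should_reallocate is hypothetical):

    static int should_reallocate(ZSTD_cwksp* ws, size_t neededSpace) {  /* hypothetical */
        ZSTD_cwksp_bump_oversized_duration(ws, neededSpace);  /* track persistent oversizing */
        return !ZSTD_cwksp_check_available(ws, neededSpace)   /* too small: must grow       */
            || ZSTD_cwksp_check_wasteful(ws, neededSpace);    /* oversized too long: shrink */
    }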