1 /*
2  * Copyright 2022 The Hafnium Authors.
3  *
4  * Use of this source code is governed by a BSD-style
5  * license that can be found in the LICENSE file or at
6  * https://opensource.org/licenses/BSD-3-Clause.
7  */
8 
9 #pragma once
10 
11 #include "vmapi/hf/types.h"
12 
/**
 * RT value that indicates an access to register XZR (always 0).
 * In the Rt field of a system register trap's ISS encoding, the value 31
 * names the zero register rather than a general purpose register.
 * See Arm Architecture Reference Manual Armv8-A, C1.2.5
 */
#define RT_REG_XZR (UINT64_C(31))
18 
/**
 * Hypervisor (EL2) Cycle Count Disable (bit 23).
 */
#define MDCR_EL2_HCCD (UINT64_C(0x1) << 23)

/**
 * Controls traps for Trace Filter when Self-hosted Trace is implemented
 * (bit 19).
 */
#define MDCR_EL2_TTRF (UINT64_C(0x1) << 19)

/**
 * Hypervisor (EL2) Event Count Disable (bit 17).
 */
#define MDCR_EL2_HPMD (UINT64_C(0x1) << 17)

/**
 * Trap Performance Monitor Sampling (bit 14).
 * Traps access to Statistical Profiling control registers from EL1 when
 * the Statistical Profiling Extension (SPE) is implemented.
 */
#define MDCR_EL2_TPMS (UINT64_C(0x1) << 14)

/**
 * Controls the owning translation regime and access to Profiling Buffer control
 * registers from EL1 (bits [13:12]). Depends on whether SPE is implemented.
 */
#define MDCR_EL2_E2PB (UINT64_C(0x3) << 12)

/**
 * Controls traps for Debug ROM (bit 11).
 */
#define MDCR_EL2_TDRA (UINT64_C(0x1) << 11)

/**
 * Controls traps for debug OS-Related Register accesses when DoubleLock is
 * implemented (bit 10).
 */
#define MDCR_EL2_TDOSA (UINT64_C(0x1) << 10)

/**
 * Controls traps for remaining Debug Registers not trapped by TDRA and TDOSA
 * (bit 9).
 */
#define MDCR_EL2_TDA (UINT64_C(0x1) << 9)

/**
 * Controls traps for all debug exceptions, e.g., breakpoints (bit 8).
 */
#define MDCR_EL2_TDE (UINT64_C(0x1) << 8)

/**
 * Controls traps for all performance monitor register accesses other than
 * PMCR_EL0 (bit 6).
 */
#define MDCR_EL2_TPM (UINT64_C(0x1) << 6)

/**
 * Controls traps for performance monitor register PMCR_EL0 (bit 5).
 */
#define MDCR_EL2_TPMCR (UINT64_C(0x1) << 5)

/**
 * Defines the number of event counters that are accessible from various
 * exception levels, if permitted (field HPMN, bits [4:0]). Dependent on
 * whether PMUv3 is implemented.
 */
#define MDCR_EL2_HPMN (UINT64_C(0x1f) << 0)
85 
/*
 * Definitions for interpreting the ESR_ELx registers.
 * See Arm Architecture Reference Manual Armv8-A, D13.2.36 and D13.2.37.
 */

/**
 * Offset for the Exception Class (EC) field in the ESR.
 * EC occupies bits [31:26] of ESR_ELx.
 */
#define ESR_EC_OFFSET UINT64_C(26)

/**
 * Gets the Exception Class from the ESR.
 * NOTE(review): relies on the bits above EC being zero, which holds for a
 * value read directly from ESR_ELx where the upper bits are RES0.
 */
#define GET_ESR_EC(esr) ((esr) >> ESR_EC_OFFSET)
100 
/**
 * Gets the Instruction Length (IL) bit for the synchronous exception.
 * IL is bit 25 of ESR_ELx: set for a 32-bit trapped instruction, clear for
 * a 16-bit instruction. The bit is returned in place (not shifted down).
 */
#define GET_ESR_IL(esr) ((esr) & (UINT64_C(0x1) << 25))
105 
/**
 * ESR code for an Unknown Reason exception.
 */
#define EC_UNKNOWN UINT64_C(0x0)

/**
 * ESR code for trapped WFI or WFE instruction execution.
 */
#define EC_WFI_WFE UINT64_C(0x1)

/**
 * ESR code for SVC instruction execution.
 */
#define EC_SVC UINT64_C(0x15)

/**
 * ESR code for HVC instruction execution.
 */
#define EC_HVC UINT64_C(0x16)

/**
 * ESR code for SMC instruction execution.
 */
#define EC_SMC UINT64_C(0x17)

/**
 * ESR code for MSR, MRS, or System instruction execution.
 */
#define EC_MSR UINT64_C(0x18)

/**
 * ESR code for Instruction Abort from a lower Exception level.
 */
#define EC_INSTRUCTION_ABORT_LOWER_EL UINT64_C(0x20)

/**
 * ESR code for Instruction Abort without a change in Exception level.
 */
#define EC_INSTRUCTION_ABORT_SAME_EL UINT64_C(0x21)

/**
 * ESR code for Data Abort from a lower Exception level.
 */
#define EC_DATA_ABORT_LOWER_EL UINT64_C(0x24)

/**
 * ESR code for Data Abort without a change in Exception level.
 */
#define EC_DATA_ABORT_SAME_EL UINT64_C(0x25)

/**
 * Mask for ISS bits in ESR_ELx registers: bits [21:0], the portion used by
 * the system register trap encoding.
 */
#define ISS_MASK ((UINT64_C(0x1) << 22) - UINT64_C(0x1))

/** Gets the ISS field from the ESR. */
#define GET_ESR_ISS(esr) (ISS_MASK & (esr))
162 
/**
 * System registers are identified by op0, op1, crn, crm and op2. The ISS
 * encoding additionally includes rt and the direction bit; exclude them so
 * the remaining bits uniquely identify the register, @see D13.2.37
 * (D13-2977).
 */
#define ISS_SYSREG_MASK                                     \
	(ISS_MASK &		  /* Select the ISS bits */ \
	 ~(UINT64_C(0x1f) << 5) & /* exclude rt */          \
	 ~UINT64_C(0x1) /* exclude direction */)

/** Gets the system-register-identifying bits of the ISS from the ESR. */
#define GET_ISS_SYSREG(esr) (ISS_SYSREG_MASK & (esr))
173 
/**
 * Op0 from the ISS encoding in the ESR (bits [21:20]).
 */
#define ISS_OP0_MASK UINT64_C(0x300000)
#define ISS_OP0_SHIFT 20
#define GET_ISS_OP0(esr) ((ISS_OP0_MASK & (esr)) >> ISS_OP0_SHIFT)

/**
 * Op1 from the ISS encoding in the ESR (bits [16:14]).
 */
#define ISS_OP1_MASK UINT64_C(0x1c000)
#define ISS_OP1_SHIFT 14
#define GET_ISS_OP1(esr) ((ISS_OP1_MASK & (esr)) >> ISS_OP1_SHIFT)

/**
 * Op2 from the ISS encoding in the ESR (bits [19:17]).
 */
#define ISS_OP2_MASK UINT64_C(0xe0000)
#define ISS_OP2_SHIFT 17
#define GET_ISS_OP2(esr) ((ISS_OP2_MASK & (esr)) >> ISS_OP2_SHIFT)

/**
 * CRn from the ISS encoding in the ESR (bits [13:10]).
 */
#define ISS_CRN_MASK UINT64_C(0x3c00)
#define ISS_CRN_SHIFT 10
#define GET_ISS_CRN(esr) ((ISS_CRN_MASK & (esr)) >> ISS_CRN_SHIFT)

/**
 * CRm from the ISS encoding in the ESR (bits [4:1]).
 */
#define ISS_CRM_MASK UINT64_C(0x1e)
#define ISS_CRM_SHIFT 1
#define GET_ISS_CRM(esr) ((ISS_CRM_MASK & (esr)) >> ISS_CRM_SHIFT)

/**
 * Rt, which identifies the general purpose register used for the operation
 * (bits [9:5]).
 */
#define ISS_RT_MASK UINT64_C(0x3e0)
#define ISS_RT_SHIFT 5
#define GET_ISS_RT(esr) ((ISS_RT_MASK & (esr)) >> ISS_RT_SHIFT)

/**
 * Direction (i.e., read (1) or write (0)) is the first bit in the ISS/ESR.
 */
#define ISS_DIRECTION_MASK UINT64_C(0x1)

/**
 * Gets the direction of the system register access, read (1) or write (0).
 */
#define GET_ISS_DIRECTION(esr) (ISS_DIRECTION_MASK & (esr))

/**
 * True if the ISS encoded in the esr indicates a read of the system register.
 */
#define ISS_IS_READ(esr) (ISS_DIRECTION_MASK & (esr))

/**
 * Returns the ISS encoding given the various instruction encoding parameters.
 * The rt and direction bits are left zero, matching ISS_SYSREG_MASK.
 */
#define GET_ISS_ENCODING(op0, op1, crn, crm, op2)          \
	((op0) << ISS_OP0_SHIFT | (op2) << ISS_OP2_SHIFT | \
	 (op1) << ISS_OP1_SHIFT | (crn) << ISS_CRN_SHIFT | \
	 (crm) << ISS_CRM_SHIFT)

/**
 * PMCR_EL0.N: the number of event counters implemented (bits [15:11]).
 */
#define PMCR_EL0_N_MASK UINT64_C(0xf800)
#define PMCR_EL0_N_SHIFT 11
#define GET_PMCR_EL0_N(pmcr) ((PMCR_EL0_N_MASK & (pmcr)) >> PMCR_EL0_N_SHIFT)
242 
/*
 * Define various configurations bits for the Hypervisor Configuration Register,
 * HCR_EL2. See Arm Architecture Reference Manual, D13.2.46.
 */

/**
 * Trap ID group 5, Armv8.5-MemTag related (bit 58).
 */
#define HCR_EL2_TID5 (UINT64_C(0x1) << 58)

/**
 * Stage 1 translations are treated as Tagged (bit 57).
 */
#define HCR_EL2_DCT (UINT64_C(0x1) << 57)

/**
 * Allocation tag access for MTE (bit 56).
 */
#define HCR_EL2_ATA (UINT64_C(0x1) << 56)

/**
 * Trap TLB maintenance instructions that operate on the Outer Shareable
 * domain (bit 55).
 */
#define HCR_EL2_TTLBOS (UINT64_C(0x1) << 55)

/**
 * Trap TLB maintenance instructions that operate on the Inner Shareable
 * domain (bit 54).
 */
#define HCR_EL2_TTLBIS (UINT64_C(0x1) << 54)

/**
 * Trap cache maintenance instructions that operate to the Point of
 * Unification (bit 52).
 */
#define HCR_EL2_TOCU (UINT64_C(0x1) << 52)
277 
/**
 * Trap IC IALLUIS cache maintenance instructions (bit 50).
 * Fix: TICAB is HCR_EL2 bit 50; bit 52 is TOCU, so the previous value of
 * (0x1 << 52) collided with HCR_EL2_TOCU and left IC IALLUIS untrapped.
 */
#define HCR_EL2_TICAB (UINT64_C(0x1) << 50)
282 
/**
 * Trap ID group 4 (bit 49).
 */
#define HCR_EL2_TID4 (UINT64_C(0x1) << 49)

/**
 * When set *disables* traps on Pointer Authentication related instruction
 * execution (bit 41).
 */
#define HCR_EL2_API (UINT64_C(0x1) << 41)

/**
 * When set *disables* traps on access to Pointer Authentication's "key"
 * registers (bit 40).
 */
#define HCR_EL2_APK (UINT64_C(0x1) << 40)

/**
 * Trap Error record accesses when RAS is implemented (bit 36).
 */
#define HCR_EL2_TERR (UINT64_C(0x1) << 36)

/**
 * Trap LOR register accesses when LORegions is implemented (bit 35).
 */
#define HCR_EL2_TLOR (UINT64_C(0x1) << 35)

/**
 * Stage 2 Instruction access cacheability disable (bit 33).
 * When set, forces all stage 2 translations for instruction accesses to normal
 * memory to be non-cacheable.
 */
#define HCR_EL2_ID (UINT64_C(0x1) << 33)

/**
 * Stage 2 Data access cacheability disable (bit 32).
 * When set, forces all stage 2 translations for data accesses to normal memory
 * to be non-cacheable.
 */
#define HCR_EL2_CD (UINT64_C(0x1) << 32)

/**
 * Execution state control for lower exception levels (bit 31).
 * When set, the execution state for EL1 is AArch64.
 */
#define HCR_EL2_RW (UINT64_C(0x1) << 31)

/**
 * Trap reads of Virtual Memory controls (bit 30).
 */
#define HCR_EL2_TRVM (UINT64_C(0x1) << 30)

/**
 * Trap writes of Virtual Memory controls (bit 26).
 */
#define HCR_EL2_TVM (UINT64_C(0x1) << 26)

/**
 * Trap TLB maintenance instructions (bit 25).
 */
#define HCR_EL2_TTLB (UINT64_C(0x1) << 25)

/**
 * Trap cache maintenance instructions (bit 24).
 */
#define HCR_EL2_TPU (UINT64_C(0x1) << 24)

/**
 * Trap data or unified cache maintenance instructions (bit 23).
 */
#define HCR_EL2_TPCP (UINT64_C(0x1) << 23)

/**
 * Trap data or unified cache maintenance instructions that operate by Set/Way
 * (bit 22).
 */
#define HCR_EL2_TSW (UINT64_C(0x1) << 22)

/**
 * Trap Auxiliary Control Registers (bit 21).
 * When set, traps ACTLR_EL1 accesses to EL2.
 */
#define HCR_EL2_TACR (UINT64_C(0x1) << 21)

/**
 * Trap implementation defined functionality (bit 20).
 * When set, traps EL1 accesses to implementation defined encodings to EL2.
 */
#define HCR_EL2_TIDCP (UINT64_C(0x1) << 20)

/**
 * Trap SMC instructions (bit 19).
 * When set, traps EL1 execution of SMC instructions to EL2.
 */
#define HCR_EL2_TSC (UINT64_C(0x1) << 19)

/**
 * Trap ID group 3 (bit 18).
 */
#define HCR_EL2_TID3 (UINT64_C(0x1) << 18)

/**
 * Trap ID group 2 (bit 17).
 */
#define HCR_EL2_TID2 (UINT64_C(0x1) << 17)

/**
 * Trap ID group 1 (bit 16).
 */
#define HCR_EL2_TID1 (UINT64_C(0x1) << 16)

/**
 * Trap ID group 0 (bit 15).
 */
#define HCR_EL2_TID0 (UINT64_C(0x1) << 15)

/**
 * Traps EL0 and EL1 execution of Wait for Event (WFE) instructions to EL2
 * (bit 14).
 */
#define HCR_EL2_TWE (UINT64_C(0x1) << 14)

/**
 * Trap WFI instructions (bit 13).
 * When set, traps EL0 and EL1 execution of WFI instructions to EL2.
 */
#define HCR_EL2_TWI (UINT64_C(0x1) << 13)

/**
 * Barrier Shareability upgrade, field BSU, bits [11:10].
 * When set to 0b01, the minimum shareability domain applied to barrier
 * instructions is inner shareable.
 */
#define HCR_EL2_BSU_INNER_SHAREABLE (UINT64_C(0x1) << 10)

/**
 * Force Broadcast (bit 9).
 * When set certain instructions related to invalidating the TLB are broadcast
 * within the Inner Shareable domain.
 */
#define HCR_EL2_FB (UINT64_C(0x1) << 9)

/**
 * Virtual IRQ Interrupt (bit 7).
 * When set indicates that there is a virtual IRQ pending.
 */
#define HCR_EL2_VI (UINT64_C(0x1) << 7)

/**
 * Virtual FIQ Interrupt (bit 6).
 * When set indicates that there is a virtual FIQ pending.
 */
#define HCR_EL2_VF (UINT64_C(0x1) << 6)

/**
 * Physical SError Routing (bit 5).
 * When set, physical SError interrupts are taken to EL2, unless routed to EL3.
 */
#define HCR_EL2_AMO (UINT64_C(0x1) << 5)

/**
 * Physical IRQ Routing (bit 4).
 * When set, physical IRQ interrupts are taken to EL2, unless routed to EL3.
 */
#define HCR_EL2_IMO (UINT64_C(0x1) << 4)

/**
 * Physical FIQ Routing (bit 3).
 * When set, physical FIQ interrupts are taken to EL2, unless routed to EL3.
 */
#define HCR_EL2_FMO (UINT64_C(0x1) << 3)

/**
 * Protected Table Walk (bit 2).
 * When set a translation table access made as part of a stage 1 translation
 * table walk is subject to a stage 2 translation. The memory access generates a
 * stage 2 permission fault.
 */
#define HCR_EL2_PTW (UINT64_C(0x1) << 2)

/**
 * Set/Way Invalidation Override (bit 1).
 * Causes EL1 execution of the data cache invalidate by set/way instructions to
 * perform a data cache clean and invalidate by set/way.
 */
#define HCR_EL2_SWIO (UINT64_C(0x1) << 1)

/**
 * Virtualization enable (bit 0).
 * When set EL1 and EL0 stage 2 address translation is enabled.
 */
#define HCR_EL2_VM (UINT64_C(0x1) << 0)

/**
 * EL2 Host (bit 34). Enables a configuration where a Host Operating System is
 * running in EL2, and the Host Operating System's applications are running in
 * EL0.
 */
#define HCR_EL2_E2H (UINT64_C(0x1) << 34)

/**
 * Trap General Exceptions, from EL0 (bit 27).
 */

#define HCR_EL2_TGE_SHIFT (27)
#define HCR_EL2_TGE (UINT64_C(0x1) << HCR_EL2_TGE_SHIFT)

/**
 * Trap system register accesses to trace registers.
 * Traps accesses to ETM registers using the register interface. Does not trap
 * on accesses through the memory-mapped interface.
 * CPTR_EL2.TTA is register bit 20 when HCR_EL2.E2H=0 (ARMv8.1-VHE disabled)
 * and is register bit 28 with HCR_EL2.E2H=1 (ARMv8.1-VHE enabled)
 */
#define CPTR_EL2_TTA (UINT64_C(0x1) << 20)
#define CPTR_EL2_VHE_TTA (UINT64_C(0x1) << 28)

/**
 * When HCR_EL2.E2H=1 (ARMv8.1-VHE enabled), CPTR_EL2 contains control bits to
 * enable and disable access to Floating Point, Advanced SIMD and SVE
 * instructions (field FPEN, bits [21:20]). This control does not cause
 * execution of FP/SIMD instructions to be trapped.
 */
#define CPTR_EL2_VHE_FPEN (UINT64_C(0x3) << 20)

/**
 * When HCR_EL2.E2H=1, this control (field ZEN, bits [17:16]) does not cause
 * execution of SVE instructions and accesses to ZCR_EL2/ZCR_EL1 to be trapped.
 */
#define CPTR_EL2_VHE_ZEN (UINT64_C(0x3) << 16)
510 
/*
 * Process State Bit definitions.
 *
 * These apply to the PSTATE, as well as registers that contain PSTATE fields,
 * e.g., SPSR_EL1.
 */

/**
 * Debug exception mask bit (bit 9).
 */
#define PSR_D (UINT64_C(1) << 9)

/**
 * Asynchronous SError interrupt mask bit (bit 8).
 */
#define PSR_A (UINT64_C(1) << 8)

/**
 * Asynchronous IRQ interrupt mask bit (bit 7).
 */
#define PSR_I (UINT64_C(1) << 7)

/**
 * Asynchronous FIQ interrupt mask bit (bit 6).
 */
#define PSR_F (UINT64_C(1) << 6)

/**
 * AArch32 State bit: M[4], also known as nRW (bit 4).
 */
#define PSR_ARCH_MODE_32 (UINT64_C(1) << 4)

/**
 * PE Mode bit mask, field M[3:0].
 */
#define PSR_PE_MODE_MASK UINT64_C(0xf)

/**
 * PE Mode: EL0t (0b0000) — EL0 using the SP_EL0 stack pointer.
 */
#define PSR_PE_MODE_EL0T UINT64_C(0x0)

/**
 * PE Mode: EL1h (0b0101) — EL1 using the SP_EL1 stack pointer.
 */
#define PSR_PE_MODE_EL1H UINT64_C(0x5)
557 
/*
 * Define configurations bits for the System Control Register (EL2), SCTLR_EL2.
 * See Arm Architecture Reference Manual, D13.2.106.
 */

/**
 * When SCTLR_EL3.ATA is 1, controls EL2 access to Allocation Tags (bit 43).
 */
#define SCTLR_EL2_ATA (UINT64_C(0x1) << 43)

/**
 * When SCTLR_EL3.ATA0 is 1, controls EL0 access to Allocation Tags (bit 42).
 */
#define SCTLR_EL2_ATA0 (UINT64_C(0x1) << 42)

/**
 * SCTLR_EL2.TCF controls the effect of Tag Check Faults in EL2
 * (field bits [41:40]; the mask below is unshifted).
 */
#define SCTLR_EL2_TCF_SHIFT (40)
#define SCTLR_EL2_TCF_MASK (UINT64_C(0x3))

/**
 * When FEAT_BTI is implemented, BT bit set, and PE executing at EL2
 * PACIASP/PACIBSP are not compatible with PSTATE.BTYPE==11 (bit 36).
 */
#define SCTLR_EL2_BT (UINT64_C(0x1) << 36)

/**
 * When FEAT_PAUTH is implemented, controls enabling of pointer authentication
 * using APIAKey_EL1 of instructions addresses in EL2 or EL2&0 translation
 * regimes (bit 31).
 */
#define SCTLR_EL2_ENIA (UINT64_C(0x1) << 31)

/**
 * Reserved, RES1 (bit 28).
 */
#define SCTLR_EL2_B28 (UINT64_C(0x1) << 28)

/**
 * Exception entry is a context synchronization Event (Armv8.5-CSEH),
 * otherwise RES1 (bit 22).
 */
#define SCTLR_EL2_EIS (UINT64_C(0x1) << 22)

/**
 * Implicit Error Synchronization event enable, ARMv8.2-IESB (bit 21).
 */
#define SCTLR_EL2_IESB (UINT64_C(0x1) << 21)

/**
 * Write permission implies XN, Execute-never (bit 19).
 */
#define SCTLR_EL2_WXN (UINT64_C(0x1) << 19)

/**
 * Reserved, RES1 (bit 18).
 */
#define SCTLR_EL2_B18 (UINT64_C(0x1) << 18)

/**
 * Reserved, RES1 (bit 16).
 */
#define SCTLR_EL2_B16 (UINT64_C(0x1) << 16)

/**
 * Instruction access Cacheability control (bit 12).
 */
#define SCTLR_EL2_I (UINT64_C(0x1) << 12)

/**
 * Exception exit is a context synchronization Event (Armv8.5-CSEH),
 * otherwise RES1 (bit 11).
 */
#define SCTLR_EL2_EOS (UINT64_C(0x1) << 11)

/**
 * Reserved, RES1 (bits [5:4]).
 */
#define SCTLR_EL2_B4 (UINT64_C(0x3) << 4)

/**
 * SP Alignment check enable (bit 3).
 */
#define SCTLR_EL2_SA (UINT64_C(0x1) << 3)

/**
 * Cacheability control, for data accesses (bit 2).
 */
#define SCTLR_EL2_C (UINT64_C(0x1) << 2)

/**
 * Alignment check enable (bit 1).
 */
#define SCTLR_EL2_A_SHIFT (1)
#define SCTLR_EL2_A (UINT64_C(0x1) << SCTLR_EL2_A_SHIFT)

/**
 * MMU enable for EL2 stage 1 address translation (bit 0).
 */
#define SCTLR_EL2_M (UINT64_C(0x1) << 0)
659 
/**
 * VHE feature field, ID_AA64MMFR1_EL1.VH, bits [11:8].
 * Extract with ((reg >> SHIFT) & MASK) and compare against SUPPORTED.
 */
#define ID_AA64MMFR1_EL1_VH_SHIFT 8
#define ID_AA64MMFR1_EL1_VH_MASK UINT64_C(0xf)
#define ID_AA64MMFR1_EL1_VH_SUPPORTED UINT64_C(0x1)

/**
 * Branch Target Identification mechanism support in AArch64 state
 * (ID_AA64PFR1_EL1.BT, bits [3:0]).
 */
#define ID_AA64PFR1_EL1_BT (UINT64_C(0xf) << 0)

/**
 * Scalable Vector Extension field, ID_AA64PFR0_EL1.SVE, bits [35:32].
 */
#define ID_AA64PFR0_EL1_SVE_SHIFT 32
#define ID_AA64PFR0_EL1_SVE_MASK UINT64_C(0xf)
#define ID_AA64PFR0_EL1_SVE_SUPPORTED UINT64_C(0x1)
678 
/**
 * Whether the SVE feature is implemented can be determined by extracting
 * the ID_AA64PFR0_EL1.SVE field with the ID_AA64PFR0_EL1_SVE_* definitions
 * above and comparing it against ID_AA64PFR0_EL1_SVE_SUPPORTED.
 */
682 
/** SVE control register (ZCR_ELx). */
/*
 * LEN field, bits [3:0]. NOTE(review): per the SVE architecture, the
 * requested vector length is (LEN + 1) * 128 bits; 0xf requests the
 * maximum implemented length — confirm against the Arm ARM for new uses.
 */
#define ZCR_LEN_MASK UINT32_C(0xf)
#define ZCR_LEN_MAX UINT32_C(0xf)
686