/*
 * Copyright (c) 2021-2024 HPMicro
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */


#ifndef HPM_VSC_H
#define HPM_VSC_H

/* VSC (Vector Signal Converter) register block.
 * Memory-mapped layout; each member's comment gives its byte offset from
 * the peripheral base address. __RW/__R/__W access qualifiers follow the
 * CMSIS-style conventions used throughout this SDK.
 */
typedef struct {
    __RW uint32_t ABC_MODE;                    /* 0x0: abc mode */
    __RW uint32_t ADC_CHAN_ASSIGN;             /* 0x4: assign adc_chan for value_a/b/c */
    __RW uint32_t VALUE_A_DATA_OPT;            /* 0x8: value_a data operation mode */
    __R  uint8_t  RESERVED0[4];                /* 0xC - 0xF: Reserved */
    __RW uint32_t VALUE_B_DATA_OPT;            /* 0x10: value_b data operation mode */
    __R  uint8_t  RESERVED1[4];                /* 0x14 - 0x17: Reserved */
    __RW uint32_t VALUE_C_DATA_OPT;            /* 0x18: value_c data operation mode */
    __R  uint8_t  RESERVED2[4];                /* 0x1C - 0x1F: Reserved */
    __RW uint32_t VALUE_A_OFFSET;              /* 0x20: value_a offset */
    __RW uint32_t VALUE_B_OFFSET;              /* 0x24: value_b offset */
    __RW uint32_t VALUE_C_OFFSET;              /* 0x28: value_c offset */
    __RW uint32_t IRQ_STATUS;                  /* 0x2C: irq status */
    __RW uint32_t VALUE_A_SW;                  /* 0x30: value_a software inject value */
    __RW uint32_t VALUE_B_SW;                  /* 0x34: value_b software inject value */
    __RW uint32_t VALUE_C_SW;                  /* 0x38: value_c software inject value */
    /* NOTE(review): declared write-only (__W) here while the bitfield comment
     * below says W1C — confirm against the reference manual. */
    __W  uint32_t VALUE_SW_READY;              /* 0x3C: software inject value_a/value_b/value_c ready */
    __W  uint32_t TRIGGER_SW;                  /* 0x40: software trigger event */
    __RW uint32_t TIMELOCK;                    /* 0x44: timestamp mode and position capture ctrl */
    __RW uint32_t POSITION_SW;                 /* 0x48: position software inject value */
    __RW uint32_t ADC_WAIT_CYCLE;              /* 0x4C: adc wait cycle after trigger adc capture event */
    __RW uint32_t POS_WAIT_CYCLE;              /* 0x50: pos wait cycle after trigger adc capture event */
    __RW uint32_t IRQ_ENABLE;                  /* 0x54: irq bit enable */
    __RW uint32_t ADC_PHASE_TOLERATE;          /* 0x58: adc phase tolerate */
    __RW uint32_t POS_POLE;                    /* 0x5C: position pole num */
    __R  uint8_t  RESERVED3[160];              /* 0x60 - 0xFF: Reserved */
    __R  uint32_t ID_POSEDGE;                  /* 0x100: posedge order Id value */
    __R  uint32_t IQ_POSEDGE;                  /* 0x104: posedge order Iq value */
    __R  uint32_t ID_NEGEDGE;                  /* 0x108: negedge order Id value */
    __R  uint32_t IQ_NEGEDGE;                  /* 0x10C: negedge order Iq value */
    __R  uint32_t ALPHA_POSEDGE;               /* 0x110: posedge order alpha value */
    __R  uint32_t BETA_POSEDGE;                /* 0x114: posedge order beta value */
    __R  uint32_t ALPHA_NEGEDGE;               /* 0x118: negedge order alpha value */
    __R  uint32_t BETA_NEGEDGE;                /* 0x11C: negedge order beta value */
    __R  uint32_t TIMESTAMP_LOCKED;            /* 0x120: timestamp_locked */
    __R  uint32_t DEBUG_STATUS0;               /* 0x124: debug_status0 */
} VSC_Type;


/* Bitfield definition for register: ABC_MODE */
/*
 * PHASE_ABSENT_MODE (RW)
 *
 * whether using value_a and value_b instead of three phase
 */
#define VSC_ABC_MODE_PHASE_ABSENT_MODE_MASK (0x80000000UL)
#define VSC_ABC_MODE_PHASE_ABSENT_MODE_SHIFT (31U)
#define VSC_ABC_MODE_PHASE_ABSENT_MODE_SET(x) (((uint32_t)(x) << VSC_ABC_MODE_PHASE_ABSENT_MODE_SHIFT) & VSC_ABC_MODE_PHASE_ABSENT_MODE_MASK)
#define VSC_ABC_MODE_PHASE_ABSENT_MODE_GET(x) (((uint32_t)(x) & VSC_ABC_MODE_PHASE_ABSENT_MODE_MASK) >> VSC_ABC_MODE_PHASE_ABSENT_MODE_SHIFT)

/*
 * VALUE_C_WIDTH (RW)
 *
 * numbers of value_c for each convert
 */
#define VSC_ABC_MODE_VALUE_C_WIDTH_MASK (0xF000000UL)
#define VSC_ABC_MODE_VALUE_C_WIDTH_SHIFT (24U)
#define VSC_ABC_MODE_VALUE_C_WIDTH_SET(x) (((uint32_t)(x) << VSC_ABC_MODE_VALUE_C_WIDTH_SHIFT) & VSC_ABC_MODE_VALUE_C_WIDTH_MASK)
#define VSC_ABC_MODE_VALUE_C_WIDTH_GET(x) (((uint32_t)(x) & VSC_ABC_MODE_VALUE_C_WIDTH_MASK) >> VSC_ABC_MODE_VALUE_C_WIDTH_SHIFT)

/*
 * VALUE_B_WIDTH (RW)
 *
 * numbers of value_b for each convert
 */
#define VSC_ABC_MODE_VALUE_B_WIDTH_MASK (0xF00000UL)
#define VSC_ABC_MODE_VALUE_B_WIDTH_SHIFT (20U)
#define VSC_ABC_MODE_VALUE_B_WIDTH_SET(x) (((uint32_t)(x) << VSC_ABC_MODE_VALUE_B_WIDTH_SHIFT) & VSC_ABC_MODE_VALUE_B_WIDTH_MASK)
#define VSC_ABC_MODE_VALUE_B_WIDTH_GET(x) (((uint32_t)(x) & VSC_ABC_MODE_VALUE_B_WIDTH_MASK) >> VSC_ABC_MODE_VALUE_B_WIDTH_SHIFT)

/*
 * VALUE_A_WIDTH (RW)
 *
 * numbers of value_a for each convert
 */
#define VSC_ABC_MODE_VALUE_A_WIDTH_MASK (0xF0000UL)
#define VSC_ABC_MODE_VALUE_A_WIDTH_SHIFT (16U)
#define VSC_ABC_MODE_VALUE_A_WIDTH_SET(x) (((uint32_t)(x) << VSC_ABC_MODE_VALUE_A_WIDTH_SHIFT) & VSC_ABC_MODE_VALUE_A_WIDTH_MASK)
#define VSC_ABC_MODE_VALUE_A_WIDTH_GET(x) (((uint32_t)(x) & VSC_ABC_MODE_VALUE_A_WIDTH_MASK) >> VSC_ABC_MODE_VALUE_A_WIDTH_SHIFT)

/*
 * VALUE_C_LOC (RW)
 *
 * the adc index of value_c:
 * 2'b:00: reserved;
 * 2'b:01: from adc0;
 * 2'b:10: from adc1;
 * 2'b:11: from adc2;
 */
#define VSC_ABC_MODE_VALUE_C_LOC_MASK (0x3000U)
#define VSC_ABC_MODE_VALUE_C_LOC_SHIFT (12U)
#define VSC_ABC_MODE_VALUE_C_LOC_SET(x) (((uint32_t)(x) << VSC_ABC_MODE_VALUE_C_LOC_SHIFT) & VSC_ABC_MODE_VALUE_C_LOC_MASK)
#define VSC_ABC_MODE_VALUE_C_LOC_GET(x) (((uint32_t)(x) & VSC_ABC_MODE_VALUE_C_LOC_MASK) >> VSC_ABC_MODE_VALUE_C_LOC_SHIFT)

/*
 * VALUE_B_LOC (RW)
 *
 * the adc index of value_b:
 * 2'b:00: reserved;
 * 2'b:01: from adc0;
 * 2'b:10: from adc1;
 * 2'b:11: from adc2;
 */
#define VSC_ABC_MODE_VALUE_B_LOC_MASK (0x300U)
#define VSC_ABC_MODE_VALUE_B_LOC_SHIFT (8U)
#define VSC_ABC_MODE_VALUE_B_LOC_SET(x) (((uint32_t)(x) << VSC_ABC_MODE_VALUE_B_LOC_SHIFT) & VSC_ABC_MODE_VALUE_B_LOC_MASK)
#define VSC_ABC_MODE_VALUE_B_LOC_GET(x) (((uint32_t)(x) & VSC_ABC_MODE_VALUE_B_LOC_MASK) >> VSC_ABC_MODE_VALUE_B_LOC_SHIFT)

/*
 * VALUE_A_LOC (RW)
 *
 * the adc index of value_a:
 * 2'b:00: reserved;
 * 2'b:01: from adc0;
 * 2'b:10: from adc1;
 * 2'b:11: from adc2;
 */
#define VSC_ABC_MODE_VALUE_A_LOC_MASK (0x30U)
#define VSC_ABC_MODE_VALUE_A_LOC_SHIFT (4U)
#define VSC_ABC_MODE_VALUE_A_LOC_SET(x) (((uint32_t)(x) << VSC_ABC_MODE_VALUE_A_LOC_SHIFT) & VSC_ABC_MODE_VALUE_A_LOC_MASK)
#define VSC_ABC_MODE_VALUE_A_LOC_GET(x) (((uint32_t)(x) & VSC_ABC_MODE_VALUE_A_LOC_MASK) >> VSC_ABC_MODE_VALUE_A_LOC_SHIFT)

/*
 * ENABLE_VSC (RW)
 *
 * enable vsc convert:
 * 0: disable vsc convert
 * 1: enable vsc convert
 */
#define VSC_ABC_MODE_ENABLE_VSC_MASK (0x8U)
#define VSC_ABC_MODE_ENABLE_VSC_SHIFT (3U)
#define VSC_ABC_MODE_ENABLE_VSC_SET(x) (((uint32_t)(x) << VSC_ABC_MODE_ENABLE_VSC_SHIFT) & VSC_ABC_MODE_ENABLE_VSC_MASK)
#define VSC_ABC_MODE_ENABLE_VSC_GET(x) (((uint32_t)(x) & VSC_ABC_MODE_ENABLE_VSC_MASK) >> VSC_ABC_MODE_ENABLE_VSC_SHIFT)

/* Bitfield definition for register: ADC_CHAN_ASSIGN */
/*
 * VALUE_C_CHAN (RW)
 *
 * value_c's adc chan
 */
#define VSC_ADC_CHAN_ASSIGN_VALUE_C_CHAN_MASK (0x1F0000UL)
#define VSC_ADC_CHAN_ASSIGN_VALUE_C_CHAN_SHIFT (16U)
#define VSC_ADC_CHAN_ASSIGN_VALUE_C_CHAN_SET(x) (((uint32_t)(x) << VSC_ADC_CHAN_ASSIGN_VALUE_C_CHAN_SHIFT) & VSC_ADC_CHAN_ASSIGN_VALUE_C_CHAN_MASK)
#define VSC_ADC_CHAN_ASSIGN_VALUE_C_CHAN_GET(x) (((uint32_t)(x) & VSC_ADC_CHAN_ASSIGN_VALUE_C_CHAN_MASK) >> VSC_ADC_CHAN_ASSIGN_VALUE_C_CHAN_SHIFT)

/*
 * VALUE_B_CHAN (RW)
 *
 * value_b's adc chan
 */
#define VSC_ADC_CHAN_ASSIGN_VALUE_B_CHAN_MASK (0x1F00U)
#define VSC_ADC_CHAN_ASSIGN_VALUE_B_CHAN_SHIFT (8U)
#define VSC_ADC_CHAN_ASSIGN_VALUE_B_CHAN_SET(x) (((uint32_t)(x) << VSC_ADC_CHAN_ASSIGN_VALUE_B_CHAN_SHIFT) & VSC_ADC_CHAN_ASSIGN_VALUE_B_CHAN_MASK)
#define VSC_ADC_CHAN_ASSIGN_VALUE_B_CHAN_GET(x) (((uint32_t)(x) & VSC_ADC_CHAN_ASSIGN_VALUE_B_CHAN_MASK) >> VSC_ADC_CHAN_ASSIGN_VALUE_B_CHAN_SHIFT)

/*
 * VALUE_A_CHAN (RW)
 *
 * value_a's adc chan
 */
#define VSC_ADC_CHAN_ASSIGN_VALUE_A_CHAN_MASK (0x1FU)
#define VSC_ADC_CHAN_ASSIGN_VALUE_A_CHAN_SHIFT (0U)
#define VSC_ADC_CHAN_ASSIGN_VALUE_A_CHAN_SET(x) (((uint32_t)(x) << VSC_ADC_CHAN_ASSIGN_VALUE_A_CHAN_SHIFT) & VSC_ADC_CHAN_ASSIGN_VALUE_A_CHAN_MASK)
#define VSC_ADC_CHAN_ASSIGN_VALUE_A_CHAN_GET(x) (((uint32_t)(x) & VSC_ADC_CHAN_ASSIGN_VALUE_A_CHAN_MASK) >> VSC_ADC_CHAN_ASSIGN_VALUE_A_CHAN_SHIFT)

/* Bitfield definition for register: VALUE_A_DATA_OPT */
/*
 * OPT_3 (RW)
 *
 * 0: PLUS_MUL_1
 * 1: PLUS_MUL_2
 * 5: PLUS_DIV_2
 * 6: PLUS_DIV_3
 * 7: PLUS_DIV_4
 * 8: MINUS MUL 1
 * 9: MINUS MUL 2
 * 13: MINUS DIV 2
 * 14: MINUS DIV 3
 * 15: MINUS DIV 4
 */
#define VSC_VALUE_A_DATA_OPT_OPT_3_MASK (0xF000U)
#define VSC_VALUE_A_DATA_OPT_OPT_3_SHIFT (12U)
#define VSC_VALUE_A_DATA_OPT_OPT_3_SET(x) (((uint32_t)(x) << VSC_VALUE_A_DATA_OPT_OPT_3_SHIFT) & VSC_VALUE_A_DATA_OPT_OPT_3_MASK)
#define VSC_VALUE_A_DATA_OPT_OPT_3_GET(x) (((uint32_t)(x) & VSC_VALUE_A_DATA_OPT_OPT_3_MASK) >> VSC_VALUE_A_DATA_OPT_OPT_3_SHIFT)

/*
 * OPT_2 (RW)
 *
 * 0: PLUS_MUL_1
 * 1: PLUS_MUL_2
 * 5: PLUS_DIV_2
 * 6: PLUS_DIV_3
 * 7: PLUS_DIV_4
 * 8: MINUS MUL 1
 * 9: MINUS MUL 2
 * 13: MINUS DIV 2
 * 14: MINUS DIV 3
 * 15: MINUS DIV 4
 */
#define VSC_VALUE_A_DATA_OPT_OPT_2_MASK (0xF00U)
#define VSC_VALUE_A_DATA_OPT_OPT_2_SHIFT (8U)
#define VSC_VALUE_A_DATA_OPT_OPT_2_SET(x) (((uint32_t)(x) << VSC_VALUE_A_DATA_OPT_OPT_2_SHIFT) & VSC_VALUE_A_DATA_OPT_OPT_2_MASK)
#define VSC_VALUE_A_DATA_OPT_OPT_2_GET(x) (((uint32_t)(x) & VSC_VALUE_A_DATA_OPT_OPT_2_MASK) >> VSC_VALUE_A_DATA_OPT_OPT_2_SHIFT)

/*
 * OPT_1 (RW)
 *
 * 0: PLUS_MUL_1
 * 1: PLUS_MUL_2
 * 5: PLUS_DIV_2
 * 6: PLUS_DIV_3
 * 7: PLUS_DIV_4
 * 8: MINUS MUL 1
 * 9: MINUS MUL 2
 * 13: MINUS DIV 2
 * 14: MINUS DIV 3
 * 15: MINUS DIV 4
 */
#define VSC_VALUE_A_DATA_OPT_OPT_1_MASK (0xF0U)
#define VSC_VALUE_A_DATA_OPT_OPT_1_SHIFT (4U)
#define VSC_VALUE_A_DATA_OPT_OPT_1_SET(x) (((uint32_t)(x) << VSC_VALUE_A_DATA_OPT_OPT_1_SHIFT) & VSC_VALUE_A_DATA_OPT_OPT_1_MASK)
#define VSC_VALUE_A_DATA_OPT_OPT_1_GET(x) (((uint32_t)(x) & VSC_VALUE_A_DATA_OPT_OPT_1_MASK) >> VSC_VALUE_A_DATA_OPT_OPT_1_SHIFT)

/*
 * OPT_0 (RW)
 *
 * 0: PLUS_MUL_1
 * 1: PLUS_MUL_2
 * 5: PLUS_DIV_2
 * 6: PLUS_DIV_3
 * 7: PLUS_DIV_4
 * 8: MINUS MUL 1
 * 9: MINUS MUL 2
 * 13: MINUS DIV 2
 * 14: MINUS DIV 3
 * 15: MINUS DIV 4
 */
#define VSC_VALUE_A_DATA_OPT_OPT_0_MASK (0xFU)
#define VSC_VALUE_A_DATA_OPT_OPT_0_SHIFT (0U)
#define VSC_VALUE_A_DATA_OPT_OPT_0_SET(x) (((uint32_t)(x) << VSC_VALUE_A_DATA_OPT_OPT_0_SHIFT) & VSC_VALUE_A_DATA_OPT_OPT_0_MASK)
#define VSC_VALUE_A_DATA_OPT_OPT_0_GET(x) (((uint32_t)(x) & VSC_VALUE_A_DATA_OPT_OPT_0_MASK) >> VSC_VALUE_A_DATA_OPT_OPT_0_SHIFT)

/* Bitfield definition for register: VALUE_B_DATA_OPT */
/*
 * OPT_3 (RW)
 *
 * 0: PLUS_MUL_1
 * 1: PLUS_MUL_2
 * 5: PLUS_DIV_2
 * 6: PLUS_DIV_3
 * 7: PLUS_DIV_4
 * 8: MINUS MUL 1
 * 9: MINUS MUL 2
 * 13: MINUS DIV 2
 * 14: MINUS DIV 3
 * 15: MINUS DIV 4
 */
#define VSC_VALUE_B_DATA_OPT_OPT_3_MASK (0xF000U)
#define VSC_VALUE_B_DATA_OPT_OPT_3_SHIFT (12U)
#define VSC_VALUE_B_DATA_OPT_OPT_3_SET(x) (((uint32_t)(x) << VSC_VALUE_B_DATA_OPT_OPT_3_SHIFT) & VSC_VALUE_B_DATA_OPT_OPT_3_MASK)
#define VSC_VALUE_B_DATA_OPT_OPT_3_GET(x) (((uint32_t)(x) & VSC_VALUE_B_DATA_OPT_OPT_3_MASK) >> VSC_VALUE_B_DATA_OPT_OPT_3_SHIFT)

/*
 * OPT_2 (RW)
 *
 * 0: PLUS_MUL_1
 * 1: PLUS_MUL_2
 * 5: PLUS_DIV_2
 * 6: PLUS_DIV_3
 * 7: PLUS_DIV_4
 * 8: MINUS MUL 1
 * 9: MINUS MUL 2
 * 13: MINUS DIV 2
 * 14: MINUS DIV 3
 * 15: MINUS DIV 4
 */
#define VSC_VALUE_B_DATA_OPT_OPT_2_MASK (0xF00U)
#define VSC_VALUE_B_DATA_OPT_OPT_2_SHIFT (8U)
#define VSC_VALUE_B_DATA_OPT_OPT_2_SET(x) (((uint32_t)(x) << VSC_VALUE_B_DATA_OPT_OPT_2_SHIFT) & VSC_VALUE_B_DATA_OPT_OPT_2_MASK)
#define VSC_VALUE_B_DATA_OPT_OPT_2_GET(x) (((uint32_t)(x) & VSC_VALUE_B_DATA_OPT_OPT_2_MASK) >> VSC_VALUE_B_DATA_OPT_OPT_2_SHIFT)

/*
 * OPT_1 (RW)
 *
 * 0: PLUS_MUL_1
 * 1: PLUS_MUL_2
 * 5: PLUS_DIV_2
 * 6: PLUS_DIV_3
 * 7: PLUS_DIV_4
 * 8: MINUS MUL 1
 * 9: MINUS MUL 2
 * 13: MINUS DIV 2
 * 14: MINUS DIV 3
 * 15: MINUS DIV 4
 */
#define VSC_VALUE_B_DATA_OPT_OPT_1_MASK (0xF0U)
#define VSC_VALUE_B_DATA_OPT_OPT_1_SHIFT (4U)
#define VSC_VALUE_B_DATA_OPT_OPT_1_SET(x) (((uint32_t)(x) << VSC_VALUE_B_DATA_OPT_OPT_1_SHIFT) & VSC_VALUE_B_DATA_OPT_OPT_1_MASK)
#define VSC_VALUE_B_DATA_OPT_OPT_1_GET(x) (((uint32_t)(x) & VSC_VALUE_B_DATA_OPT_OPT_1_MASK) >> VSC_VALUE_B_DATA_OPT_OPT_1_SHIFT)

/*
 * OPT_0 (RW)
 *
 * 0: PLUS_MUL_1
 * 1: PLUS_MUL_2
 * 5: PLUS_DIV_2
 * 6: PLUS_DIV_3
 * 7: PLUS_DIV_4
 * 8: MINUS MUL 1
 * 9: MINUS MUL 2
 * 13: MINUS DIV 2
 * 14: MINUS DIV 3
 * 15: MINUS DIV 4
 */
#define VSC_VALUE_B_DATA_OPT_OPT_0_MASK (0xFU)
#define VSC_VALUE_B_DATA_OPT_OPT_0_SHIFT (0U)
#define VSC_VALUE_B_DATA_OPT_OPT_0_SET(x) (((uint32_t)(x) << VSC_VALUE_B_DATA_OPT_OPT_0_SHIFT) & VSC_VALUE_B_DATA_OPT_OPT_0_MASK)
#define VSC_VALUE_B_DATA_OPT_OPT_0_GET(x) (((uint32_t)(x) & VSC_VALUE_B_DATA_OPT_OPT_0_MASK) >> VSC_VALUE_B_DATA_OPT_OPT_0_SHIFT)

/* Bitfield definition for register: VALUE_C_DATA_OPT */
/*
 * OPT_3 (RW)
 *
 * 0: PLUS_MUL_1
 * 1: PLUS_MUL_2
 * 5: PLUS_DIV_2
 * 6: PLUS_DIV_3
 * 7: PLUS_DIV_4
 * 8: MINUS MUL 1
 * 9: MINUS MUL 2
 * 13: MINUS DIV 2
 * 14: MINUS DIV 3
 * 15: MINUS DIV 4
 */
#define VSC_VALUE_C_DATA_OPT_OPT_3_MASK (0xF000U)
#define VSC_VALUE_C_DATA_OPT_OPT_3_SHIFT (12U)
#define VSC_VALUE_C_DATA_OPT_OPT_3_SET(x) (((uint32_t)(x) << VSC_VALUE_C_DATA_OPT_OPT_3_SHIFT) & VSC_VALUE_C_DATA_OPT_OPT_3_MASK)
#define VSC_VALUE_C_DATA_OPT_OPT_3_GET(x) (((uint32_t)(x) & VSC_VALUE_C_DATA_OPT_OPT_3_MASK) >> VSC_VALUE_C_DATA_OPT_OPT_3_SHIFT)

/*
 * OPT_2 (RW)
 *
 * 0: PLUS_MUL_1
 * 1: PLUS_MUL_2
 * 5: PLUS_DIV_2
 * 6: PLUS_DIV_3
 * 7: PLUS_DIV_4
 * 8: MINUS MUL 1
 * 9: MINUS MUL 2
 * 13: MINUS DIV 2
 * 14: MINUS DIV 3
 * 15: MINUS DIV 4
 */
#define VSC_VALUE_C_DATA_OPT_OPT_2_MASK (0xF00U)
#define VSC_VALUE_C_DATA_OPT_OPT_2_SHIFT (8U)
#define VSC_VALUE_C_DATA_OPT_OPT_2_SET(x) (((uint32_t)(x) << VSC_VALUE_C_DATA_OPT_OPT_2_SHIFT) & VSC_VALUE_C_DATA_OPT_OPT_2_MASK)
#define VSC_VALUE_C_DATA_OPT_OPT_2_GET(x) (((uint32_t)(x) & VSC_VALUE_C_DATA_OPT_OPT_2_MASK) >> VSC_VALUE_C_DATA_OPT_OPT_2_SHIFT)

/*
 * OPT_1 (RW)
 *
 * 0: PLUS_MUL_1
 * 1: PLUS_MUL_2
 * 5: PLUS_DIV_2
 * 6: PLUS_DIV_3
 * 7: PLUS_DIV_4
 * 8: MINUS MUL 1
 * 9: MINUS MUL 2
 * 13: MINUS DIV 2
 * 14: MINUS DIV 3
 * 15: MINUS DIV 4
 */
#define VSC_VALUE_C_DATA_OPT_OPT_1_MASK (0xF0U)
#define VSC_VALUE_C_DATA_OPT_OPT_1_SHIFT (4U)
#define VSC_VALUE_C_DATA_OPT_OPT_1_SET(x) (((uint32_t)(x) << VSC_VALUE_C_DATA_OPT_OPT_1_SHIFT) & VSC_VALUE_C_DATA_OPT_OPT_1_MASK)
#define VSC_VALUE_C_DATA_OPT_OPT_1_GET(x) (((uint32_t)(x) & VSC_VALUE_C_DATA_OPT_OPT_1_MASK) >> VSC_VALUE_C_DATA_OPT_OPT_1_SHIFT)

/*
 * OPT_0 (RW)
 *
 * 0: PLUS_MUL_1
 * 1: PLUS_MUL_2
 * 5: PLUS_DIV_2
 * 6: PLUS_DIV_3
 * 7: PLUS_DIV_4
 * 8: MINUS MUL 1
 * 9: MINUS MUL 2
 * 13: MINUS DIV 2
 * 14: MINUS DIV 3
 * 15: MINUS DIV 4
 */
#define VSC_VALUE_C_DATA_OPT_OPT_0_MASK (0xFU)
#define VSC_VALUE_C_DATA_OPT_OPT_0_SHIFT (0U)
#define VSC_VALUE_C_DATA_OPT_OPT_0_SET(x) (((uint32_t)(x) << VSC_VALUE_C_DATA_OPT_OPT_0_SHIFT) & VSC_VALUE_C_DATA_OPT_OPT_0_MASK)
#define VSC_VALUE_C_DATA_OPT_OPT_0_GET(x) (((uint32_t)(x) & VSC_VALUE_C_DATA_OPT_OPT_0_MASK) >> VSC_VALUE_C_DATA_OPT_OPT_0_SHIFT)

/* Bitfield definition for register: VALUE_A_OFFSET */
/*
 * VALUE_A_OFFSET (RW)
 *
 * value_a offset
 */
#define VSC_VALUE_A_OFFSET_VALUE_A_OFFSET_MASK (0xFFFFFFFFUL)
#define VSC_VALUE_A_OFFSET_VALUE_A_OFFSET_SHIFT (0U)
#define VSC_VALUE_A_OFFSET_VALUE_A_OFFSET_SET(x) (((uint32_t)(x) << VSC_VALUE_A_OFFSET_VALUE_A_OFFSET_SHIFT) & VSC_VALUE_A_OFFSET_VALUE_A_OFFSET_MASK)
#define VSC_VALUE_A_OFFSET_VALUE_A_OFFSET_GET(x) (((uint32_t)(x) & VSC_VALUE_A_OFFSET_VALUE_A_OFFSET_MASK) >> VSC_VALUE_A_OFFSET_VALUE_A_OFFSET_SHIFT)

/* Bitfield definition for register: VALUE_B_OFFSET */
/*
 * VALUE_B_OFFSET (RW)
 *
 * value_b offset
 */
#define VSC_VALUE_B_OFFSET_VALUE_B_OFFSET_MASK (0xFFFFFFFFUL)
#define VSC_VALUE_B_OFFSET_VALUE_B_OFFSET_SHIFT (0U)
#define VSC_VALUE_B_OFFSET_VALUE_B_OFFSET_SET(x) (((uint32_t)(x) << VSC_VALUE_B_OFFSET_VALUE_B_OFFSET_SHIFT) & VSC_VALUE_B_OFFSET_VALUE_B_OFFSET_MASK)
#define VSC_VALUE_B_OFFSET_VALUE_B_OFFSET_GET(x) (((uint32_t)(x) & VSC_VALUE_B_OFFSET_VALUE_B_OFFSET_MASK) >> VSC_VALUE_B_OFFSET_VALUE_B_OFFSET_SHIFT)

/* Bitfield definition for register: VALUE_C_OFFSET */
/*
 * VALUE_C_OFFSET (RW)
 *
 * value_c offset
 */
#define VSC_VALUE_C_OFFSET_VALUE_C_OFFSET_MASK (0xFFFFFFFFUL)
#define VSC_VALUE_C_OFFSET_VALUE_C_OFFSET_SHIFT (0U)
#define VSC_VALUE_C_OFFSET_VALUE_C_OFFSET_SET(x) (((uint32_t)(x) << VSC_VALUE_C_OFFSET_VALUE_C_OFFSET_SHIFT) & VSC_VALUE_C_OFFSET_VALUE_C_OFFSET_MASK)
#define VSC_VALUE_C_OFFSET_VALUE_C_OFFSET_GET(x) (((uint32_t)(x) & VSC_VALUE_C_OFFSET_VALUE_C_OFFSET_MASK) >> VSC_VALUE_C_OFFSET_VALUE_C_OFFSET_SHIFT)

/* Bitfield definition for register: IRQ_STATUS */
/*
 * IRQ_STATUS (RW)
 *
 * irq status bit:
 * bit0: vsc convert done irq.
 * bit1: in adc three-phase mode, if ABS(value_a+value_b+value_c) > adc_phase_tolerate, will trigger irq.
 * bit2: value_c overflow during capture process.
 * bit3: value_b_overflow during capture process.
 * bit4: value_a_overflow during capture process.
 * bit5: adc2 chan not capture enough adc value.
 * bit6: adc1 chan not capture enough adc value.
 * bit7: adc0 chan not capture enough adc value.
 * bit8: position not got valid before pos_wait_cycle timeout.
 * bit9: adc2 wait cycle timeout.
 * bit10: adc1 wait cycle timeout.
 * bit11: adc0 wait cycle timeout.
 * bit12: trigger_in break vsc convert even if adc or position is ready.
 */
#define VSC_IRQ_STATUS_IRQ_STATUS_MASK (0xFFFFFFFFUL)
#define VSC_IRQ_STATUS_IRQ_STATUS_SHIFT (0U)
#define VSC_IRQ_STATUS_IRQ_STATUS_SET(x) (((uint32_t)(x) << VSC_IRQ_STATUS_IRQ_STATUS_SHIFT) & VSC_IRQ_STATUS_IRQ_STATUS_MASK)
#define VSC_IRQ_STATUS_IRQ_STATUS_GET(x) (((uint32_t)(x) & VSC_IRQ_STATUS_IRQ_STATUS_MASK) >> VSC_IRQ_STATUS_IRQ_STATUS_SHIFT)

/* Bitfield definition for register: VALUE_A_SW */
/*
 * VALUE_A_SW (RW)
 *
 * value_a_sw
 */
#define VSC_VALUE_A_SW_VALUE_A_SW_MASK (0xFFFFFFFFUL)
#define VSC_VALUE_A_SW_VALUE_A_SW_SHIFT (0U)
#define VSC_VALUE_A_SW_VALUE_A_SW_SET(x) (((uint32_t)(x) << VSC_VALUE_A_SW_VALUE_A_SW_SHIFT) & VSC_VALUE_A_SW_VALUE_A_SW_MASK)
#define VSC_VALUE_A_SW_VALUE_A_SW_GET(x) (((uint32_t)(x) & VSC_VALUE_A_SW_VALUE_A_SW_MASK) >> VSC_VALUE_A_SW_VALUE_A_SW_SHIFT)

/* Bitfield definition for register: VALUE_B_SW */
/*
 * VALUE_B_SW (RW)
 *
 * value_b_sw
 */
#define VSC_VALUE_B_SW_VALUE_B_SW_MASK (0xFFFFFFFFUL)
#define VSC_VALUE_B_SW_VALUE_B_SW_SHIFT (0U)
#define VSC_VALUE_B_SW_VALUE_B_SW_SET(x) (((uint32_t)(x) << VSC_VALUE_B_SW_VALUE_B_SW_SHIFT) & VSC_VALUE_B_SW_VALUE_B_SW_MASK)
#define VSC_VALUE_B_SW_VALUE_B_SW_GET(x) (((uint32_t)(x) & VSC_VALUE_B_SW_VALUE_B_SW_MASK) >> VSC_VALUE_B_SW_VALUE_B_SW_SHIFT)

/* Bitfield definition for register: VALUE_C_SW */
/*
 * VALUE_C_SW (RW)
 *
 * value_c_sw
 */
#define VSC_VALUE_C_SW_VALUE_C_SW_MASK (0xFFFFFFFFUL)
#define VSC_VALUE_C_SW_VALUE_C_SW_SHIFT (0U)
#define VSC_VALUE_C_SW_VALUE_C_SW_SET(x) (((uint32_t)(x) << VSC_VALUE_C_SW_VALUE_C_SW_SHIFT) & VSC_VALUE_C_SW_VALUE_C_SW_MASK)
#define VSC_VALUE_C_SW_VALUE_C_SW_GET(x) (((uint32_t)(x) & VSC_VALUE_C_SW_VALUE_C_SW_MASK) >> VSC_VALUE_C_SW_VALUE_C_SW_SHIFT)

/* Bitfield definition for register: VALUE_SW_READY */
/*
 * VALUE_SW_READY (W1C)
 *
 * software inject value_a/value_b/value_c ready
 */
#define VSC_VALUE_SW_READY_VALUE_SW_READY_MASK (0x1U)
#define VSC_VALUE_SW_READY_VALUE_SW_READY_SHIFT (0U)
#define VSC_VALUE_SW_READY_VALUE_SW_READY_SET(x) (((uint32_t)(x) << VSC_VALUE_SW_READY_VALUE_SW_READY_SHIFT) & VSC_VALUE_SW_READY_VALUE_SW_READY_MASK)
#define VSC_VALUE_SW_READY_VALUE_SW_READY_GET(x) (((uint32_t)(x) & VSC_VALUE_SW_READY_VALUE_SW_READY_MASK) >> VSC_VALUE_SW_READY_VALUE_SW_READY_SHIFT)

/* Bitfield definition for register: TRIGGER_SW */
/*
 * TRIGGER_SW (W1C)
 *
 * software trigger to start waiting adc capture value, same as hardwired trigger_in
 */
#define VSC_TRIGGER_SW_TRIGGER_SW_MASK (0x1U)
#define VSC_TRIGGER_SW_TRIGGER_SW_SHIFT (0U)
#define VSC_TRIGGER_SW_TRIGGER_SW_SET(x) (((uint32_t)(x) << VSC_TRIGGER_SW_TRIGGER_SW_SHIFT) & VSC_TRIGGER_SW_TRIGGER_SW_MASK)
#define VSC_TRIGGER_SW_TRIGGER_SW_GET(x) (((uint32_t)(x) & VSC_TRIGGER_SW_TRIGGER_SW_MASK) >> VSC_TRIGGER_SW_TRIGGER_SW_SHIFT)

/* Bitfield definition for register: TIMELOCK */
/*
 * POSITION_CAPTURE_MODE (RW)
 *
 * position capture mode:
 * 00: position use last valid data when adc value capture finish
 * 01: position use first valid data after adc value capture
 * 10: position use last valid data before adc value capture
 * other: reserved
 */
#define VSC_TIMELOCK_POSITION_CAPTURE_MODE_MASK (0x3000U)
#define VSC_TIMELOCK_POSITION_CAPTURE_MODE_SHIFT (12U)
#define VSC_TIMELOCK_POSITION_CAPTURE_MODE_SET(x) (((uint32_t)(x) << VSC_TIMELOCK_POSITION_CAPTURE_MODE_SHIFT) & VSC_TIMELOCK_POSITION_CAPTURE_MODE_MASK)
#define VSC_TIMELOCK_POSITION_CAPTURE_MODE_GET(x) (((uint32_t)(x) & VSC_TIMELOCK_POSITION_CAPTURE_MODE_MASK) >> VSC_TIMELOCK_POSITION_CAPTURE_MODE_SHIFT)

/*
 * ADC_TIMESTAMP_SEL (RW)
 *
 * adc timestamp select:
 * 0:reserved;
 * 1: from value_a;
 * 2: from value_b;
 * 3: from value_c;
 */
#define VSC_TIMELOCK_ADC_TIMESTAMP_SEL_MASK (0x30U)
#define VSC_TIMELOCK_ADC_TIMESTAMP_SEL_SHIFT (4U)
#define VSC_TIMELOCK_ADC_TIMESTAMP_SEL_SET(x) (((uint32_t)(x) << VSC_TIMELOCK_ADC_TIMESTAMP_SEL_SHIFT) & VSC_TIMELOCK_ADC_TIMESTAMP_SEL_MASK)
#define VSC_TIMELOCK_ADC_TIMESTAMP_SEL_GET(x) (((uint32_t)(x) & VSC_TIMELOCK_ADC_TIMESTAMP_SEL_MASK) >> VSC_TIMELOCK_ADC_TIMESTAMP_SEL_SHIFT)

/*
 * VALUE_COUNTER_SEL (RW)
 *
 * adc timestamp use which number index of adc_timestamp_sel used.
 */
#define VSC_TIMELOCK_VALUE_COUNTER_SEL_MASK (0xFU)
#define VSC_TIMELOCK_VALUE_COUNTER_SEL_SHIFT (0U)
#define VSC_TIMELOCK_VALUE_COUNTER_SEL_SET(x) (((uint32_t)(x) << VSC_TIMELOCK_VALUE_COUNTER_SEL_SHIFT) & VSC_TIMELOCK_VALUE_COUNTER_SEL_MASK)
#define VSC_TIMELOCK_VALUE_COUNTER_SEL_GET(x) (((uint32_t)(x) & VSC_TIMELOCK_VALUE_COUNTER_SEL_MASK) >> VSC_TIMELOCK_VALUE_COUNTER_SEL_SHIFT)

/* Bitfield definition for register: POSITION_SW */
/*
 * POSITION_SW (RW)
 *
 * position_sw
 */
#define VSC_POSITION_SW_POSITION_SW_MASK (0xFFFFFFFFUL)
#define VSC_POSITION_SW_POSITION_SW_SHIFT (0U)
#define VSC_POSITION_SW_POSITION_SW_SET(x) (((uint32_t)(x) << VSC_POSITION_SW_POSITION_SW_SHIFT) & VSC_POSITION_SW_POSITION_SW_MASK)
#define VSC_POSITION_SW_POSITION_SW_GET(x) (((uint32_t)(x) & VSC_POSITION_SW_POSITION_SW_MASK) >> VSC_POSITION_SW_POSITION_SW_SHIFT)

/* Bitfield definition for register: ADC_WAIT_CYCLE */
/*
 * ADC_WAIT_CYCLE (RW)
 *
 * adc wait cycle after trigger adc capture event
 */
#define VSC_ADC_WAIT_CYCLE_ADC_WAIT_CYCLE_MASK (0xFFFFFFFFUL)
#define VSC_ADC_WAIT_CYCLE_ADC_WAIT_CYCLE_SHIFT (0U)
#define VSC_ADC_WAIT_CYCLE_ADC_WAIT_CYCLE_SET(x) (((uint32_t)(x) << VSC_ADC_WAIT_CYCLE_ADC_WAIT_CYCLE_SHIFT) & VSC_ADC_WAIT_CYCLE_ADC_WAIT_CYCLE_MASK)
#define VSC_ADC_WAIT_CYCLE_ADC_WAIT_CYCLE_GET(x) (((uint32_t)(x) & VSC_ADC_WAIT_CYCLE_ADC_WAIT_CYCLE_MASK) >> VSC_ADC_WAIT_CYCLE_ADC_WAIT_CYCLE_SHIFT)

/* Bitfield definition for register: POS_WAIT_CYCLE */
/*
 * POS_WAIT_CYCLE (RW)
 *
 * position wait cycle after trigger adc capture event
 */
#define VSC_POS_WAIT_CYCLE_POS_WAIT_CYCLE_MASK (0xFFFFFFFFUL)
#define VSC_POS_WAIT_CYCLE_POS_WAIT_CYCLE_SHIFT (0U)
#define VSC_POS_WAIT_CYCLE_POS_WAIT_CYCLE_SET(x) (((uint32_t)(x) << VSC_POS_WAIT_CYCLE_POS_WAIT_CYCLE_SHIFT) & VSC_POS_WAIT_CYCLE_POS_WAIT_CYCLE_MASK)
#define VSC_POS_WAIT_CYCLE_POS_WAIT_CYCLE_GET(x) (((uint32_t)(x) & VSC_POS_WAIT_CYCLE_POS_WAIT_CYCLE_MASK) >> VSC_POS_WAIT_CYCLE_POS_WAIT_CYCLE_SHIFT)

/* Bitfield definition for register: IRQ_ENABLE */
/*
 * IRQ_ENABLE (RW)
 *
 * irq enable bit:
 * bit0: vsc convert done irq.
 * bit1: in adc three-phase mode, if ABS(value_a+value_b+value_c) > adc_phase_tolerate, will trigger irq.
 * bit2: value_c overflow during capture process.
 * bit3: value_b_overflow during capture process.
 * bit4: value_a_overflow during capture process.
 * bit5: adc2 chan not capture enough adc value.
 * bit6: adc1 chan not capture enough adc value.
 * bit7: adc0 chan not capture enough adc value.
 * bit8: position not got valid before pos_wait_cycle timeout.
 * bit9: adc2 wait cycle timeout.
 * bit10: adc1 wait cycle timeout.
 * bit11: adc0 wait cycle timeout.
 * bit12: trigger_in break vsc convert even if adc or position is ready.
 */
#define VSC_IRQ_ENABLE_IRQ_ENABLE_MASK (0xFFFFFFFFUL)
#define VSC_IRQ_ENABLE_IRQ_ENABLE_SHIFT (0U)
#define VSC_IRQ_ENABLE_IRQ_ENABLE_SET(x) (((uint32_t)(x) << VSC_IRQ_ENABLE_IRQ_ENABLE_SHIFT) & VSC_IRQ_ENABLE_IRQ_ENABLE_MASK)
#define VSC_IRQ_ENABLE_IRQ_ENABLE_GET(x) (((uint32_t)(x) & VSC_IRQ_ENABLE_IRQ_ENABLE_MASK) >> VSC_IRQ_ENABLE_IRQ_ENABLE_SHIFT)

/* Bitfield definition for register: ADC_PHASE_TOLERATE */
/*
 * ADC_PHASE_TOLERATE (RW)
 *
 * in adc three-phase mode, if ABS(value_a+value_b+value_c) > adc_phase_tolerate, will trigger irq.
 */
#define VSC_ADC_PHASE_TOLERATE_ADC_PHASE_TOLERATE_MASK (0xFFFFFFFFUL)
#define VSC_ADC_PHASE_TOLERATE_ADC_PHASE_TOLERATE_SHIFT (0U)
#define VSC_ADC_PHASE_TOLERATE_ADC_PHASE_TOLERATE_SET(x) (((uint32_t)(x) << VSC_ADC_PHASE_TOLERATE_ADC_PHASE_TOLERATE_SHIFT) & VSC_ADC_PHASE_TOLERATE_ADC_PHASE_TOLERATE_MASK)
#define VSC_ADC_PHASE_TOLERATE_ADC_PHASE_TOLERATE_GET(x) (((uint32_t)(x) & VSC_ADC_PHASE_TOLERATE_ADC_PHASE_TOLERATE_MASK) >> VSC_ADC_PHASE_TOLERATE_ADC_PHASE_TOLERATE_SHIFT)

/* Bitfield definition for register: POS_POLE */
/*
 * POS_POLE (RW)
 *
 * pole number
 */
#define VSC_POS_POLE_POS_POLE_MASK (0xFFFFU)
#define VSC_POS_POLE_POS_POLE_SHIFT (0U)
#define VSC_POS_POLE_POS_POLE_SET(x) (((uint32_t)(x) << VSC_POS_POLE_POS_POLE_SHIFT) & VSC_POS_POLE_POS_POLE_MASK)
#define VSC_POS_POLE_POS_POLE_GET(x) (((uint32_t)(x) & VSC_POS_POLE_POS_POLE_MASK) >> VSC_POS_POLE_POS_POLE_SHIFT)

/* Bitfield definition for register: ID_POSEDGE */
/*
 * ID_POSEDGE (RO)
 *
 * posedge order Id value
 */
#define VSC_ID_POSEDGE_ID_POSEDGE_MASK (0xFFFFFFFFUL)
#define VSC_ID_POSEDGE_ID_POSEDGE_SHIFT (0U)
#define VSC_ID_POSEDGE_ID_POSEDGE_GET(x) (((uint32_t)(x) & VSC_ID_POSEDGE_ID_POSEDGE_MASK) >> VSC_ID_POSEDGE_ID_POSEDGE_SHIFT)

/* Bitfield definition for register: IQ_POSEDGE */
/*
 * IQ_POSEDGE (RO)
 *
 * posedge order Iq value
 */
#define VSC_IQ_POSEDGE_IQ_POSEDGE_MASK (0xFFFFFFFFUL)
#define VSC_IQ_POSEDGE_IQ_POSEDGE_SHIFT (0U)
#define VSC_IQ_POSEDGE_IQ_POSEDGE_GET(x) (((uint32_t)(x) & VSC_IQ_POSEDGE_IQ_POSEDGE_MASK) >> VSC_IQ_POSEDGE_IQ_POSEDGE_SHIFT)

/* Bitfield definition for register: ID_NEGEDGE */
/*
 * ID_NEGEDGE (RO)
 *
 * negedge order Id value
 */
#define VSC_ID_NEGEDGE_ID_NEGEDGE_MASK (0xFFFFFFFFUL)
#define VSC_ID_NEGEDGE_ID_NEGEDGE_SHIFT (0U)
#define VSC_ID_NEGEDGE_ID_NEGEDGE_GET(x) (((uint32_t)(x) & VSC_ID_NEGEDGE_ID_NEGEDGE_MASK) >> VSC_ID_NEGEDGE_ID_NEGEDGE_SHIFT)

/* Bitfield definition for register: IQ_NEGEDGE */
/*
 * IQ_NEGEDGE (RO)
 *
 * negedge order Iq value
 */
#define VSC_IQ_NEGEDGE_IQ_NEGEDGE_MASK (0xFFFFFFFFUL)
#define VSC_IQ_NEGEDGE_IQ_NEGEDGE_SHIFT (0U)
#define VSC_IQ_NEGEDGE_IQ_NEGEDGE_GET(x) (((uint32_t)(x) & VSC_IQ_NEGEDGE_IQ_NEGEDGE_MASK) >> VSC_IQ_NEGEDGE_IQ_NEGEDGE_SHIFT)

/* Bitfield definition for register: ALPHA_POSEDGE */
/*
 * ALPHA_POSEDGE (RO)
 *
 * posedge order alpha value
 */
#define VSC_ALPHA_POSEDGE_ALPHA_POSEDGE_MASK (0xFFFFFFFFUL)
#define VSC_ALPHA_POSEDGE_ALPHA_POSEDGE_SHIFT (0U)
#define VSC_ALPHA_POSEDGE_ALPHA_POSEDGE_GET(x) (((uint32_t)(x) & VSC_ALPHA_POSEDGE_ALPHA_POSEDGE_MASK) >> VSC_ALPHA_POSEDGE_ALPHA_POSEDGE_SHIFT)

/* Bitfield definition for register: BETA_POSEDGE */
/*
 * BETA_POSEDGE (RO)
 *
 * posedge order beta value
 */
#define VSC_BETA_POSEDGE_BETA_POSEDGE_MASK (0xFFFFFFFFUL)
#define VSC_BETA_POSEDGE_BETA_POSEDGE_SHIFT (0U)
#define VSC_BETA_POSEDGE_BETA_POSEDGE_GET(x) (((uint32_t)(x) & VSC_BETA_POSEDGE_BETA_POSEDGE_MASK) >> VSC_BETA_POSEDGE_BETA_POSEDGE_SHIFT)

/* Bitfield definition for register: ALPHA_NEGEDGE */
/*
 * ALPHA_NEGEDGE (RO)
 *
 * negedge order alpha value
 */
#define VSC_ALPHA_NEGEDGE_ALPHA_NEGEDGE_MASK (0xFFFFFFFFUL)
#define VSC_ALPHA_NEGEDGE_ALPHA_NEGEDGE_SHIFT (0U)
#define VSC_ALPHA_NEGEDGE_ALPHA_NEGEDGE_GET(x) (((uint32_t)(x) & VSC_ALPHA_NEGEDGE_ALPHA_NEGEDGE_MASK) >> VSC_ALPHA_NEGEDGE_ALPHA_NEGEDGE_SHIFT)

/* Bitfield definition for register: BETA_NEGEDGE */
/*
 * BETA_NEGEDGE (RO)
 *
 * negedge order beta value
 */
#define VSC_BETA_NEGEDGE_BETA_NEGEDGE_MASK (0xFFFFFFFFUL)
#define VSC_BETA_NEGEDGE_BETA_NEGEDGE_SHIFT (0U)
#define VSC_BETA_NEGEDGE_BETA_NEGEDGE_GET(x) (((uint32_t)(x) & VSC_BETA_NEGEDGE_BETA_NEGEDGE_MASK) >> VSC_BETA_NEGEDGE_BETA_NEGEDGE_SHIFT)

/* Bitfield definition for register: TIMESTAMP_LOCKED */
/*
 * TIMESTAMP_LOCKED (RO)
 *
 * timestamp_locked
 */
#define VSC_TIMESTAMP_LOCKED_TIMESTAMP_LOCKED_MASK (0xFFFFFFFFUL)
#define VSC_TIMESTAMP_LOCKED_TIMESTAMP_LOCKED_SHIFT (0U)
#define VSC_TIMESTAMP_LOCKED_TIMESTAMP_LOCKED_GET(x) (((uint32_t)(x) & VSC_TIMESTAMP_LOCKED_TIMESTAMP_LOCKED_MASK) >> VSC_TIMESTAMP_LOCKED_TIMESTAMP_LOCKED_SHIFT)

/* Bitfield definition for register: DEBUG_STATUS0 */
/*
 * VALUE_A_COUNTER (RO)
 *
 * value_a_counter
 */
#define VSC_DEBUG_STATUS0_VALUE_A_COUNTER_MASK (0xF00U)
#define VSC_DEBUG_STATUS0_VALUE_A_COUNTER_SHIFT (8U)
#define VSC_DEBUG_STATUS0_VALUE_A_COUNTER_GET(x) (((uint32_t)(x) & VSC_DEBUG_STATUS0_VALUE_A_COUNTER_MASK) >> VSC_DEBUG_STATUS0_VALUE_A_COUNTER_SHIFT)

/*
 * VALUE_B_COUNTER (RO)
 *
 * value_b_counter
 */
#define VSC_DEBUG_STATUS0_VALUE_B_COUNTER_MASK (0xF0U)
#define VSC_DEBUG_STATUS0_VALUE_B_COUNTER_SHIFT (4U)
#define VSC_DEBUG_STATUS0_VALUE_B_COUNTER_GET(x) (((uint32_t)(x) & VSC_DEBUG_STATUS0_VALUE_B_COUNTER_MASK) >> VSC_DEBUG_STATUS0_VALUE_B_COUNTER_SHIFT)

/*
 * VALUE_C_COUNTER (RO)
 *
 * value_c_counter
 */
#define VSC_DEBUG_STATUS0_VALUE_C_COUNTER_MASK (0xFU)
#define VSC_DEBUG_STATUS0_VALUE_C_COUNTER_SHIFT (0U)
#define VSC_DEBUG_STATUS0_VALUE_C_COUNTER_GET(x) (((uint32_t)(x) & VSC_DEBUG_STATUS0_VALUE_C_COUNTER_MASK) >> VSC_DEBUG_STATUS0_VALUE_C_COUNTER_SHIFT)




#endif /* HPM_VSC_H */