Lines matching refs: ctx

The matches below come from the Skylake audio DSP code-loader DMA (CLDMA) driver. Each match is prefixed with its line number in the source file, and matches are grouped by the function in which they occur; after each group, a short hedged reconstruction sketches the surrounding code that the truncated matches omit.

17   static void skl_cldma_int_enable(struct sst_dsp *ctx)
19       sst_dsp_shim_update_bits_unlocked(ctx, SKL_ADSP_REG_ADSPIC,

23   void skl_cldma_int_disable(struct sst_dsp *ctx)
25       sst_dsp_shim_update_bits_unlocked(ctx,
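
Both helpers toggle the CLDMA bit in the ADSP interrupt-control register; the matches only show the first argument line of each call. A minimal reconstruction of the pair, assuming the SKL_ADSPIC_CL_DMA mask/value that the truncated matches omit:

    static void skl_cldma_int_enable(struct sst_dsp *ctx)
    {
            /* set the CLDMA bit in ADSPIC (mask name is an assumption) */
            sst_dsp_shim_update_bits_unlocked(ctx, SKL_ADSP_REG_ADSPIC,
                            SKL_ADSPIC_CL_DMA, SKL_ADSPIC_CL_DMA);
    }

    void skl_cldma_int_disable(struct sst_dsp *ctx)
    {
            /* same mask, value 0: clear the CLDMA interrupt-enable bit */
            sst_dsp_shim_update_bits_unlocked(ctx, SKL_ADSP_REG_ADSPIC,
                            SKL_ADSPIC_CL_DMA, 0);
    }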

29   static void skl_cldma_stream_run(struct sst_dsp *ctx, bool enable)
34       sst_dsp_shim_update_bits_unlocked(ctx,
42       val = sst_dsp_shim_read(ctx, SKL_ADSP_REG_CL_SD_CTL) &
52       dev_err(ctx->dev, "Failed to set Run bit=%d enable=%d\n", val, enable);
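
This function requests a Run-bit change and then polls until the hardware reflects it. A sketch, assuming the CL_SD_CTL_RUN_MASK/CL_SD_CTL_RUN() macros and the poll bounds (3 us delay, 300 iterations); the error message comes verbatim from the match at line 52, and udelay() assumes <linux/delay.h>:

    static void skl_cldma_stream_run(struct sst_dsp *ctx, bool enable)
    {
            unsigned char val;
            int timeout = 300;

            sst_dsp_shim_update_bits_unlocked(ctx, SKL_ADSP_REG_CL_SD_CTL,
                            CL_SD_CTL_RUN_MASK, CL_SD_CTL_RUN(enable));

            do {
                    udelay(3);
                    val = sst_dsp_shim_read(ctx, SKL_ADSP_REG_CL_SD_CTL) &
                            CL_SD_CTL_RUN_MASK;
                    if (enable == !!val)
                            break;  /* hardware caught up with the request */
            } while (--timeout);

            if (!timeout)
                    dev_err(ctx->dev, "Failed to set Run bit=%d enable=%d\n",
                            val, enable);
    }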

55   static void skl_cldma_stream_clear(struct sst_dsp *ctx)
58       skl_cldma_stream_run(ctx, 0);
60       sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
62       sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
64       sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
66       sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
69       sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_BDLPL, CL_SD_BDLPLBA(0));
70       sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_BDLPU, 0);
72       sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_CBL, 0);
73       sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_LVI, 0);
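
The teardown order matters: stop the stream first, then disable the per-stream control bits, then detach the descriptor list. A sketch; the IOCE/FEIE/DPIB_ENABLE/STRM mask names are assumptions, since the matches only show four anonymous CL_SD_CTL updates:

    static void skl_cldma_stream_clear(struct sst_dsp *ctx)
    {
            /* make sure the Run bit is cleared before touching the rest */
            skl_cldma_stream_run(ctx, 0);

            sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
                            CL_SD_CTL_IOCE_MASK, CL_SD_CTL_IOCE(0));
            sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
                            CL_SD_CTL_FEIE_MASK, CL_SD_CTL_FEIE(0));
            sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
                            CL_SD_CTL_DPIB_ENABLE_MASK, CL_SD_CTL_DPIB_ENABLE(0));
            sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
                            CL_SD_CTL_STRM_MASK, CL_SD_CTL_STRM(0));

            /* detach the BDL and zero the cyclic length / last valid index */
            sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_BDLPL, CL_SD_BDLPLBA(0));
            sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_BDLPU, 0);
            sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_CBL, 0);
            sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_LVI, 0);
    }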

77   static void skl_cldma_setup_bdle(struct sst_dsp *ctx,
83       ctx->cl_dev.frags = 0;
86           (ctx->cl_dev.frags * ctx->cl_dev.bufsize));
91       bdl[2] = cpu_to_le32(ctx->cl_dev.bufsize);
93       size -= ctx->cl_dev.bufsize;
97       ctx->cl_dev.frags++;
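
Each buffer-descriptor-list entry is four little-endian u32s: buffer address low/high, length, and an interrupt-on-completion flag. A sketch of the population loop; the snd_dma_buffer/bdlp/with_ioc parameters and the virt_to_phys() address derivation are assumptions, while the frags counter, per-fragment offset arithmetic, and bufsize length field follow the matches:

    static void skl_cldma_setup_bdle(struct sst_dsp *ctx,
                    struct snd_dma_buffer *dmab_data,
                    __le32 **bdlp, int size, int with_ioc)
    {
            __le32 *bdl = *bdlp;

            ctx->cl_dev.frags = 0;
            while (size > 0) {
                    phys_addr_t addr = virt_to_phys(dmab_data->area +
                                    (ctx->cl_dev.frags * ctx->cl_dev.bufsize));

                    bdl[0] = cpu_to_le32(lower_32_bits(addr));
                    bdl[1] = cpu_to_le32(upper_32_bits(addr));
                    bdl[2] = cpu_to_le32(ctx->cl_dev.bufsize);

                    size -= ctx->cl_dev.bufsize;
                    /* interrupt-on-completion only on the final entry */
                    bdl[3] = (size || !with_ioc) ? 0 : cpu_to_le32(0x01);

                    bdl += 4;
                    ctx->cl_dev.frags++;
            }
    }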

107  static void skl_cldma_setup_controller(struct sst_dsp *ctx,
111      skl_cldma_stream_clear(ctx);
112      sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_BDLPL,
114      sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_BDLPU,
117      sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_CBL, max_size);
118      sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_LVI, count - 1);
119      sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
121      sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
123      sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
125      sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
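
This programs the stream registers from a prepared BDL, mirroring stream_clear() in reverse. A sketch under stated assumptions: the CL_SD_BDLPLBA()/CL_SD_BDLPUBA() address-split macros, the four mask names, and the FW_CL_STREAM_NUMBER stream tag are not visible in the matches:

    static void skl_cldma_setup_controller(struct sst_dsp *ctx,
                    struct snd_dma_buffer *dmab_bdl, unsigned int max_size,
                    u32 count)
    {
            skl_cldma_stream_clear(ctx);

            /* split the 64-bit BDL base address across the BDLP registers */
            sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_BDLPL,
                            CL_SD_BDLPLBA(dmab_bdl->addr));
            sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_BDLPU,
                            CL_SD_BDLPUBA(dmab_bdl->addr));

            /* cyclic buffer length and last valid BDL index */
            sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_CBL, max_size);
            sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_LVI, count - 1);

            /* re-enable what stream_clear() turned off */
            sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
                            CL_SD_CTL_IOCE_MASK, CL_SD_CTL_IOCE(1));
            sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
                            CL_SD_CTL_FEIE_MASK, CL_SD_CTL_FEIE(1));
            sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
                            CL_SD_CTL_DPIB_ENABLE_MASK, CL_SD_CTL_DPIB_ENABLE(1));
            sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
                            CL_SD_CTL_STRM_MASK, CL_SD_CTL_STRM(FW_CL_STREAM_NUMBER));
    }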

129  static void skl_cldma_setup_spb(struct sst_dsp *ctx,
133      sst_dsp_shim_update_bits_unlocked(ctx,
138      sst_dsp_shim_write_unlocked(ctx, SKL_ADSP_REG_CL_SPBFIFO_SPIB, size);

141  static void skl_cldma_cleanup_spb(struct sst_dsp *ctx)
143      sst_dsp_shim_update_bits_unlocked(ctx,
148      sst_dsp_shim_write_unlocked(ctx, SKL_ADSP_REG_CL_SPBFIFO_SPIB, 0);
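
The software-position-in-buffer (SPIB) register tells the DMA engine how many valid bytes it may consume. Setup enables SPIB mode and writes the byte count; cleanup disables it and writes 0. A sketch; the SPBFCCTL register and SPIBE mask names are assumptions:

    static void skl_cldma_setup_spb(struct sst_dsp *ctx,
                    unsigned int size, bool enable)
    {
            if (enable)
                    sst_dsp_shim_update_bits_unlocked(ctx,
                                    SKL_ADSP_REG_CL_SPBFIFO_SPBFCCTL,
                                    CL_SPBFIFO_SPBFCCTL_SPIBE_MASK,
                                    CL_SPBFIFO_SPBFCCTL_SPIBE(1));

            /* number of valid bytes the DMA engine may fetch */
            sst_dsp_shim_write_unlocked(ctx, SKL_ADSP_REG_CL_SPBFIFO_SPIB, size);
    }

    static void skl_cldma_cleanup_spb(struct sst_dsp *ctx)
    {
            sst_dsp_shim_update_bits_unlocked(ctx,
                            SKL_ADSP_REG_CL_SPBFIFO_SPBFCCTL,
                            CL_SPBFIFO_SPBFCCTL_SPIBE_MASK,
                            CL_SPBFIFO_SPBFCCTL_SPIBE(0));

            sst_dsp_shim_write_unlocked(ctx, SKL_ADSP_REG_CL_SPBFIFO_SPIB, 0);
    }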

151  static void skl_cldma_cleanup(struct sst_dsp *ctx)
153      skl_cldma_cleanup_spb(ctx);
154      skl_cldma_stream_clear(ctx);
156      ctx->dsp_ops.free_dma_buf(ctx->dev, &ctx->cl_dev.dmab_data);
157      ctx->dsp_ops.free_dma_buf(ctx->dev, &ctx->cl_dev.dmab_bdl);

160  int skl_cldma_wait_interruptible(struct sst_dsp *ctx)
164      if (!wait_event_timeout(ctx->cl_dev.wait_queue,
165                  ctx->cl_dev.wait_condition,
167          dev_err(ctx->dev, "%s: Wait timeout\n", __func__);
172      dev_dbg(ctx->dev, "%s: Event wake\n", __func__);
173      if (ctx->cl_dev.wake_status != SKL_CL_DMA_BUF_COMPLETE) {
174          dev_err(ctx->dev, "%s: DMA Error\n", __func__);
179      ctx->cl_dev.wake_status = SKL_CL_DMA_STATUS_NONE;
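
Despite the "interruptible" in its name, the match at line 164 shows a plain timed wait (wait_event_timeout), not wait_event_interruptible_timeout. A sketch of the full flow; the SKL_WAIT_TIMEOUT constant and the -EIO return values are assumptions, while the wait_queue/wait_condition pair and the wake_status check follow the matches:

    int skl_cldma_wait_interruptible(struct sst_dsp *ctx)
    {
            int ret = 0;

            if (!wait_event_timeout(ctx->cl_dev.wait_queue,
                                    ctx->cl_dev.wait_condition,
                                    msecs_to_jiffies(SKL_WAIT_TIMEOUT))) {
                    dev_err(ctx->dev, "%s: Wait timeout\n", __func__);
                    ret = -EIO;
                    goto cleanup;
            }

            dev_dbg(ctx->dev, "%s: Event wake\n", __func__);
            if (ctx->cl_dev.wake_status != SKL_CL_DMA_BUF_COMPLETE) {
                    dev_err(ctx->dev, "%s: DMA Error\n", __func__);
                    ret = -EIO;
            }

    cleanup:
            /* re-arm for the next transfer */
            ctx->cl_dev.wake_status = SKL_CL_DMA_STATUS_NONE;
            return ret;
    }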

183  static void skl_cldma_stop(struct sst_dsp *ctx)
185      skl_cldma_stream_run(ctx, false);

188  static void skl_cldma_fill_buffer(struct sst_dsp *ctx, unsigned int size,
191      dev_dbg(ctx->dev, "Size: %x, intr_enable: %d\n", size, intr_enable);
192      dev_dbg(ctx->dev, "buf_pos_index:%d, trigger:%d\n",
193              ctx->cl_dev.dma_buffer_offset, trigger);
194      dev_dbg(ctx->dev, "spib position: %d\n", ctx->cl_dev.curr_spib_pos);
201      if (ctx->cl_dev.dma_buffer_offset + size > ctx->cl_dev.bufsize) {
202          unsigned int size_b = ctx->cl_dev.bufsize -
203                  ctx->cl_dev.dma_buffer_offset;
204          memcpy(ctx->cl_dev.dmab_data.area + ctx->cl_dev.dma_buffer_offset,
208          ctx->cl_dev.dma_buffer_offset = 0;
211      memcpy(ctx->cl_dev.dmab_data.area + ctx->cl_dev.dma_buffer_offset,
214      if (ctx->cl_dev.curr_spib_pos == ctx->cl_dev.bufsize)
215          ctx->cl_dev.dma_buffer_offset = 0;
217          ctx->cl_dev.dma_buffer_offset = ctx->cl_dev.curr_spib_pos;
219      ctx->cl_dev.wait_condition = false;
222      skl_cldma_int_enable(ctx);
224      ctx->cl_dev.ops.cl_setup_spb(ctx, ctx->cl_dev.curr_spib_pos, trigger);
226      ctx->cl_dev.ops.cl_trigger(ctx, true);
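
This copies one chunk into the cyclic DMA buffer, splitting the memcpy when the chunk would run past the buffer end, then arms SPIB and optionally kicks the stream. A sketch with the dev_dbg lines elided; the curr_pos parameter and argument order are assumptions (intr_enable and trigger are grounded in the dbg matches at lines 191-193):

    static void skl_cldma_fill_buffer(struct sst_dsp *ctx, unsigned int size,
                    const void *curr_pos, bool intr_enable, bool trigger)
    {
            /* split the copy when it would cross the buffer end */
            if (ctx->cl_dev.dma_buffer_offset + size > ctx->cl_dev.bufsize) {
                    unsigned int size_b = ctx->cl_dev.bufsize -
                                    ctx->cl_dev.dma_buffer_offset;

                    memcpy(ctx->cl_dev.dmab_data.area +
                            ctx->cl_dev.dma_buffer_offset, curr_pos, size_b);
                    size -= size_b;
                    curr_pos += size_b;
                    ctx->cl_dev.dma_buffer_offset = 0;
            }

            memcpy(ctx->cl_dev.dmab_data.area + ctx->cl_dev.dma_buffer_offset,
                            curr_pos, size);

            /* the next write starts where SPIB ends, or at 0 on exact wrap */
            if (ctx->cl_dev.curr_spib_pos == ctx->cl_dev.bufsize)
                    ctx->cl_dev.dma_buffer_offset = 0;
            else
                    ctx->cl_dev.dma_buffer_offset = ctx->cl_dev.curr_spib_pos;

            ctx->cl_dev.wait_condition = false;

            if (intr_enable)
                    skl_cldma_int_enable(ctx);

            ctx->cl_dev.ops.cl_setup_spb(ctx, ctx->cl_dev.curr_spib_pos, trigger);
            if (trigger)
                    ctx->cl_dev.ops.cl_trigger(ctx, true);
    }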

245  skl_cldma_copy_to_buf(struct sst_dsp *ctx, const void *bin,
258      dev_dbg(ctx->dev, "%s: Total binary size: %u\n", __func__, bytes_left);
261      if (bytes_left > ctx->cl_dev.bufsize) {
267          if (ctx->cl_dev.curr_spib_pos == 0)
268              ctx->cl_dev.curr_spib_pos = ctx->cl_dev.bufsize;
270          size = ctx->cl_dev.bufsize;
271          skl_cldma_fill_buffer(ctx, size, curr_pos, true, start);
275          ret = skl_cldma_wait_interruptible(ctx);
277              skl_cldma_stop(ctx);
282          skl_cldma_int_disable(ctx);
284          if ((ctx->cl_dev.curr_spib_pos + bytes_left)
285                  <= ctx->cl_dev.bufsize) {
286              ctx->cl_dev.curr_spib_pos += bytes_left;
289              (ctx->cl_dev.bufsize -
290              ctx->cl_dev.curr_spib_pos);
291          ctx->cl_dev.curr_spib_pos = excess_bytes;
295          skl_cldma_fill_buffer(ctx, size,
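
A binary larger than the cyclic buffer is streamed one bufsize chunk at a time, sleeping on the buffer-complete interrupt between chunks; the final partial chunk needs no interrupt. A sketch of the loop framing, which is an assumption; the total_size parameter name is hypothetical, while bytes_left, curr_pos, start, and excess_bytes follow the matches:

    static int skl_cldma_copy_to_buf(struct sst_dsp *ctx, const void *bin,
                    u32 total_size)
    {
            int ret;
            bool start = true;
            unsigned int excess_bytes;
            u32 size;
            u32 bytes_left = total_size;
            const void *curr_pos = bin;

            if (!total_size)
                    return -EINVAL;

            dev_dbg(ctx->dev, "%s: Total binary size: %u\n", __func__, bytes_left);

            while (bytes_left) {
                    if (bytes_left > ctx->cl_dev.bufsize) {
                            /* full chunk: treat an empty SPIB as a full wrap */
                            if (ctx->cl_dev.curr_spib_pos == 0)
                                    ctx->cl_dev.curr_spib_pos = ctx->cl_dev.bufsize;

                            size = ctx->cl_dev.bufsize;
                            skl_cldma_fill_buffer(ctx, size, curr_pos, true, start);
                            start = false;

                            ret = skl_cldma_wait_interruptible(ctx);
                            if (ret < 0) {
                                    skl_cldma_stop(ctx);
                                    return ret;
                            }
                    } else {
                            /* last chunk: no completion interrupt required */
                            skl_cldma_int_disable(ctx);

                            if ((ctx->cl_dev.curr_spib_pos + bytes_left)
                                            <= ctx->cl_dev.bufsize) {
                                    ctx->cl_dev.curr_spib_pos += bytes_left;
                            } else {
                                    excess_bytes = bytes_left -
                                            (ctx->cl_dev.bufsize -
                                             ctx->cl_dev.curr_spib_pos);
                                    ctx->cl_dev.curr_spib_pos = excess_bytes;
                            }

                            size = bytes_left;
                            skl_cldma_fill_buffer(ctx, size, curr_pos,
                                                    false, start);
                    }
                    bytes_left -= size;
                    curr_pos += size;
            }

            return 0;
    }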

307  void skl_cldma_process_intr(struct sst_dsp *ctx)
312      sst_dsp_shim_read_unlocked(ctx, SKL_ADSP_REG_CL_SD_STS);
315      ctx->cl_dev.wake_status = SKL_CL_DMA_ERR;
317      ctx->cl_dev.wake_status = SKL_CL_DMA_BUF_COMPLETE;
319      ctx->cl_dev.wait_condition = true;
320      wake_up(&ctx->cl_dev.wait_queue);
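
The interrupt path classifies the stream status and releases the waiter in skl_cldma_wait_interruptible(). A sketch; the SKL_CL_DMA_SD_INT_COMPLETE status bit is an assumption, while the status read, the wake_status classification, and the wake_up() follow the matches:

    void skl_cldma_process_intr(struct sst_dsp *ctx)
    {
            u8 cl_dma_intr_status;

            cl_dma_intr_status =
                    sst_dsp_shim_read_unlocked(ctx, SKL_ADSP_REG_CL_SD_STS);

            if (!(cl_dma_intr_status & SKL_CL_DMA_SD_INT_COMPLETE))
                    ctx->cl_dev.wake_status = SKL_CL_DMA_ERR;
            else
                    ctx->cl_dev.wake_status = SKL_CL_DMA_BUF_COMPLETE;

            /* release skl_cldma_wait_interruptible() */
            ctx->cl_dev.wait_condition = true;
            wake_up(&ctx->cl_dev.wait_queue);
    }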

323  int skl_cldma_prepare(struct sst_dsp *ctx)
328      ctx->cl_dev.bufsize = SKL_MAX_BUFFER_SIZE;
331      ctx->cl_dev.ops.cl_setup_bdle = skl_cldma_setup_bdle;
332      ctx->cl_dev.ops.cl_setup_controller = skl_cldma_setup_controller;
333      ctx->cl_dev.ops.cl_setup_spb = skl_cldma_setup_spb;
334      ctx->cl_dev.ops.cl_cleanup_spb = skl_cldma_cleanup_spb;
335      ctx->cl_dev.ops.cl_trigger = skl_cldma_stream_run;
336      ctx->cl_dev.ops.cl_cleanup_controller = skl_cldma_cleanup;
337      ctx->cl_dev.ops.cl_copy_to_dmabuf = skl_cldma_copy_to_buf;
338      ctx->cl_dev.ops.cl_stop_dma = skl_cldma_stop;
341      ret = ctx->dsp_ops.alloc_dma_buf(ctx->dev,
342              &ctx->cl_dev.dmab_data, ctx->cl_dev.bufsize);
344          dev_err(ctx->dev, "Alloc buffer for base fw failed: %x\n", ret);
348      ret = ctx->dsp_ops.alloc_dma_buf(ctx->dev,
349              &ctx->cl_dev.dmab_bdl, PAGE_SIZE);
351          dev_err(ctx->dev, "Alloc buffer for blde failed: %x\n", ret);
352      ctx->dsp_ops.free_dma_buf(ctx->dev, &ctx->cl_dev.dmab_data);
355      bdl = (__le32 *)ctx->cl_dev.dmab_bdl.area;
358      ctx->cl_dev.ops.cl_setup_bdle(ctx, &ctx->cl_dev.dmab_data,
359              &bdl, ctx->cl_dev.bufsize, 1);
360      ctx->cl_dev.ops.cl_setup_controller(ctx, &ctx->cl_dev.dmab_bdl,
361              ctx->cl_dev.bufsize, ctx->cl_dev.frags);
363      ctx->cl_dev.curr_spib_pos = 0;
364      ctx->cl_dev.dma_buffer_offset = 0;
365      init_waitqueue_head(&ctx->cl_dev.wait_queue);
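
One-time CLDMA bring-up: wire the ops table (matches 331-338), allocate the cyclic data buffer sized SKL_MAX_BUFFER_SIZE (match at line 328) plus a page for the BDL, then program the descriptor list and stream registers. A sketch with the ops assignments elided; the return values on the error paths are assumptions:

    int skl_cldma_prepare(struct sst_dsp *ctx)
    {
            int ret;
            __le32 *bdl;

            ctx->cl_dev.bufsize = SKL_MAX_BUFFER_SIZE;

            /* ... ops table wiring as in matches 331-338 ... */

            /* cyclic data buffer the DMA engine reads the firmware from */
            ret = ctx->dsp_ops.alloc_dma_buf(ctx->dev,
                            &ctx->cl_dev.dmab_data, ctx->cl_dev.bufsize);
            if (ret < 0) {
                    dev_err(ctx->dev, "Alloc buffer for base fw failed: %x\n", ret);
                    return ret;
            }

            /* one page for the buffer descriptor list (the source's log
             * message misspells "bdl" as "blde")
             */
            ret = ctx->dsp_ops.alloc_dma_buf(ctx->dev,
                            &ctx->cl_dev.dmab_bdl, PAGE_SIZE);
            if (ret < 0) {
                    dev_err(ctx->dev, "Alloc buffer for bdl failed: %x\n", ret);
                    ctx->dsp_ops.free_dma_buf(ctx->dev, &ctx->cl_dev.dmab_data);
                    return ret;
            }

            /* build the BDL, then point the controller at it */
            bdl = (__le32 *)ctx->cl_dev.dmab_bdl.area;
            ctx->cl_dev.ops.cl_setup_bdle(ctx, &ctx->cl_dev.dmab_data,
                            &bdl, ctx->cl_dev.bufsize, 1);
            ctx->cl_dev.ops.cl_setup_controller(ctx, &ctx->cl_dev.dmab_bdl,
                            ctx->cl_dev.bufsize, ctx->cl_dev.frags);

            ctx->cl_dev.curr_spib_pos = 0;
            ctx->cl_dev.dma_buffer_offset = 0;
            init_waitqueue_head(&ctx->cl_dev.wait_queue);

            return 0;
    }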