Lines Matching refs:vcrypto
32 struct virtio_crypto *vcrypto = vq->vdev->priv; in virtcrypto_ctrlq_callback() local
37 spin_lock_irqsave(&vcrypto->ctrl_lock, flags); in virtcrypto_ctrlq_callback()
41 spin_unlock_irqrestore(&vcrypto->ctrl_lock, flags); in virtcrypto_ctrlq_callback()
43 spin_lock_irqsave(&vcrypto->ctrl_lock, flags); in virtcrypto_ctrlq_callback()
46 spin_unlock_irqrestore(&vcrypto->ctrl_lock, flags); in virtcrypto_ctrlq_callback()
49 int virtio_crypto_ctrl_vq_request(struct virtio_crypto *vcrypto, struct scatterlist *sgs[], in virtio_crypto_ctrl_vq_request() argument
58 spin_lock_irqsave(&vcrypto->ctrl_lock, flags); in virtio_crypto_ctrl_vq_request()
59 err = virtqueue_add_sgs(vcrypto->ctrl_vq, sgs, out_sgs, in_sgs, vc_ctrl_req, GFP_ATOMIC); in virtio_crypto_ctrl_vq_request()
61 spin_unlock_irqrestore(&vcrypto->ctrl_lock, flags); in virtio_crypto_ctrl_vq_request()
65 virtqueue_kick(vcrypto->ctrl_vq); in virtio_crypto_ctrl_vq_request()
66 spin_unlock_irqrestore(&vcrypto->ctrl_lock, flags); in virtio_crypto_ctrl_vq_request()
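Note: the lines above from virtcrypto_ctrlq_callback() (37-46) and virtio_crypto_ctrl_vq_request() (58-66) show that the control virtqueue is only ever touched under ctrl_lock, on both the completion and the submission side. Below is a minimal sketch of the submission pattern, reconstructed from the listed lines (this listing appears to come from the virtio-crypto core driver); the function name is invented, the request argument is left as a plain void pointer, and the real helper very likely also waits for the device to complete the request, which the listing does not show.

static int ctrl_vq_submit_sketch(struct virtio_crypto *vcrypto,
				 struct scatterlist *sgs[],
				 unsigned int out_sgs, unsigned int in_sgs,
				 void *vc_ctrl_req)
{
	unsigned long flags;
	int err;

	spin_lock_irqsave(&vcrypto->ctrl_lock, flags);
	err = virtqueue_add_sgs(vcrypto->ctrl_vq, sgs, out_sgs, in_sgs,
				vc_ctrl_req, GFP_ATOMIC);
	if (err < 0) {
		spin_unlock_irqrestore(&vcrypto->ctrl_lock, flags);
		return err;
	}

	/* Kick while still holding ctrl_lock, as lines 65-66 suggest. */
	virtqueue_kick(vcrypto->ctrl_vq);
	spin_unlock_irqrestore(&vcrypto->ctrl_lock, flags);

	return 0;
}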
91 struct virtio_crypto *vcrypto = vq->vdev->priv; in virtcrypto_dataq_callback() local
92 struct data_queue *dq = &vcrypto->data_vq[vq->index]; in virtcrypto_dataq_callback()
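Note: lines 91-92 map a completed data virtqueue back to its per-queue state via vq->index. Together with the tasklet_kill() on data_vq[i].done_task at line 500, they suggest the callback simply defers completion handling to a per-queue tasklet; a sketch of that shape (name and exact body are assumptions):

static void dataq_callback_sketch(struct virtqueue *vq)
{
	struct virtio_crypto *vcrypto = vq->vdev->priv;
	struct data_queue *dq = &vcrypto->data_vq[vq->index];

	/* Defer buffer processing to this queue's done tasklet. */
	tasklet_schedule(&dq->done_task);
}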
188 static void virtcrypto_set_affinity(struct virtio_crypto *vcrypto) in virtcrypto_set_affinity() argument
196 if (vcrypto->curr_queue == 1 || vcrypto->max_data_queues == 1) { in virtcrypto_set_affinity()
197 virtcrypto_clean_affinity(vcrypto, -1); in virtcrypto_set_affinity()
209 virtqueue_set_affinity(vcrypto->data_vq[i].vq, cpumask_of(cpu)); in virtcrypto_set_affinity()
210 if (++i >= vcrypto->max_data_queues) in virtcrypto_set_affinity()
214 vcrypto->affinity_hint_set = true; in virtcrypto_set_affinity()
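Note: lines 188-214 outline virtcrypto_set_affinity(): with a single data queue any affinity hints are cleared, otherwise each data virtqueue is pinned to one online CPU in turn. A sketch reconstructed from those lines (function name invented; the real function may take additional CPU-hotplug locking not shown here):

static void set_affinity_sketch(struct virtio_crypto *vcrypto)
{
	int i = 0, cpu;

	/* One queue only: spreading makes no sense, drop existing hints. */
	if (vcrypto->curr_queue == 1 || vcrypto->max_data_queues == 1) {
		virtcrypto_clean_affinity(vcrypto, -1);
		return;
	}

	/* Pin data virtqueue i to the i-th online CPU. */
	for_each_online_cpu(cpu) {
		virtqueue_set_affinity(vcrypto->data_vq[i].vq, cpumask_of(cpu));
		if (++i >= vcrypto->max_data_queues)
			break;
	}

	vcrypto->affinity_hint_set = true;
}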
247 static int virtcrypto_update_status(struct virtio_crypto *vcrypto) in virtcrypto_update_status() argument
252 virtio_cread_le(vcrypto->vdev, in virtcrypto_update_status()
260 dev_warn(&vcrypto->vdev->dev, in virtcrypto_update_status()
263 virtio_break_device(vcrypto->vdev); in virtcrypto_update_status()
267 if (vcrypto->status == status) in virtcrypto_update_status()
270 vcrypto->status = status; in virtcrypto_update_status()
272 if (vcrypto->status & VIRTIO_CRYPTO_S_HW_READY) { in virtcrypto_update_status()
273 err = virtcrypto_dev_start(vcrypto); in virtcrypto_update_status()
275 dev_err(&vcrypto->vdev->dev, in virtcrypto_update_status()
280 dev_info(&vcrypto->vdev->dev, "Accelerator device is ready\n"); in virtcrypto_update_status()
282 virtcrypto_dev_stop(vcrypto); in virtcrypto_update_status()
283 dev_info(&vcrypto->vdev->dev, "Accelerator is not ready\n"); in virtcrypto_update_status()
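Note: lines 247-283 show virtcrypto_update_status() reading the status field from the device config space, treating unknown bits as a broken device, and starting or stopping the accelerator when the ready bit changes. A sketch of that flow (messages and return codes beyond what is quoted above are assumptions, as is the struct virtio_crypto_config/status member used for the config read):

static int update_status_sketch(struct virtio_crypto *vcrypto)
{
	u32 status;
	int err;

	/* Read the current status out of the device config space. */
	virtio_cread_le(vcrypto->vdev,
			struct virtio_crypto_config, status, &status);

	/* Any bit other than HW_READY is unexpected: mark the device broken. */
	if (status & ~VIRTIO_CRYPTO_S_HW_READY) {
		dev_warn(&vcrypto->vdev->dev,
			 "unknown status bits: 0x%x\n", status);
		virtio_break_device(vcrypto->vdev);
		return -EPERM;
	}

	if (vcrypto->status == status)
		return 0;
	vcrypto->status = status;

	if (vcrypto->status & VIRTIO_CRYPTO_S_HW_READY) {
		err = virtcrypto_dev_start(vcrypto);
		if (err) {
			dev_err(&vcrypto->vdev->dev,
				"failed to start virtio crypto device\n");
			return err;
		}
		dev_info(&vcrypto->vdev->dev, "Accelerator device is ready\n");
	} else {
		virtcrypto_dev_stop(vcrypto);
		dev_info(&vcrypto->vdev->dev, "Accelerator is not ready\n");
	}

	return 0;
}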
289 static int virtcrypto_start_crypto_engines(struct virtio_crypto *vcrypto) in virtcrypto_start_crypto_engines() argument
294 for (i = 0; i < vcrypto->max_data_queues; i++) { in virtcrypto_start_crypto_engines()
295 if (vcrypto->data_vq[i].engine) { in virtcrypto_start_crypto_engines()
296 ret = crypto_engine_start(vcrypto->data_vq[i].engine); in virtcrypto_start_crypto_engines()
306 if (vcrypto->data_vq[i].engine) in virtcrypto_start_crypto_engines()
307 crypto_engine_exit(vcrypto->data_vq[i].engine); in virtcrypto_start_crypto_engines()
312 static void virtcrypto_clear_crypto_engines(struct virtio_crypto *vcrypto) in virtcrypto_clear_crypto_engines() argument
316 for (i = 0; i < vcrypto->max_data_queues; i++) in virtcrypto_clear_crypto_engines()
317 if (vcrypto->data_vq[i].engine) in virtcrypto_clear_crypto_engines()
318 crypto_engine_exit(vcrypto->data_vq[i].engine); in virtcrypto_clear_crypto_engines()
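Note: lines 289-318 show the crypto-engine bring-up and teardown: start one engine per data queue, roll back the engines already started on failure, and exit every engine on the clear path. A sketch of the start-with-rollback pattern (name invented):

static int start_crypto_engines_sketch(struct virtio_crypto *vcrypto)
{
	u32 i;
	int ret;

	for (i = 0; i < vcrypto->max_data_queues; i++) {
		if (vcrypto->data_vq[i].engine) {
			ret = crypto_engine_start(vcrypto->data_vq[i].engine);
			if (ret)
				goto err;
		}
	}
	return 0;

err:
	/* Undo the engines started so far, as lines 306-307 suggest. */
	while (i--)
		if (vcrypto->data_vq[i].engine)
			crypto_engine_exit(vcrypto->data_vq[i].engine);
	return ret;
}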
321 static void virtcrypto_del_vqs(struct virtio_crypto *vcrypto) in virtcrypto_del_vqs() argument
323 struct virtio_device *vdev = vcrypto->vdev; in virtcrypto_del_vqs()
325 virtcrypto_clean_affinity(vcrypto, -1); in virtcrypto_del_vqs()
329 virtcrypto_free_queues(vcrypto); in virtcrypto_del_vqs()
334 struct virtio_crypto *vcrypto = in vcrypto_config_changed_work() local
337 virtcrypto_update_status(vcrypto); in vcrypto_config_changed_work()
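Note: lines 334-337, together with the INIT_WORK at line 459 and the schedule_work at line 513, describe the config-change path: the config interrupt only schedules config_work, and the work handler re-reads the status. A sketch of the handler (the container_of usage is assumed from the config_work field):

static void config_changed_work_sketch(struct work_struct *work)
{
	struct virtio_crypto *vcrypto =
		container_of(work, struct virtio_crypto, config_work);

	/* Re-evaluate the device status outside interrupt context. */
	virtcrypto_update_status(vcrypto);
}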
343 struct virtio_crypto *vcrypto; in virtcrypto_probe() local
375 vcrypto = kzalloc_node(sizeof(*vcrypto), GFP_KERNEL, in virtcrypto_probe()
377 if (!vcrypto) in virtcrypto_probe()
410 err = virtcrypto_devmgr_add_dev(vcrypto); in virtcrypto_probe()
415 vcrypto->owner = THIS_MODULE; in virtcrypto_probe()
416 vcrypto = vdev->priv = vcrypto; in virtcrypto_probe()
417 vcrypto->vdev = vdev; in virtcrypto_probe()
419 spin_lock_init(&vcrypto->ctrl_lock); in virtcrypto_probe()
422 vcrypto->curr_queue = 1; in virtcrypto_probe()
423 vcrypto->max_data_queues = max_data_queues; in virtcrypto_probe()
424 vcrypto->max_cipher_key_len = max_cipher_key_len; in virtcrypto_probe()
425 vcrypto->max_auth_key_len = max_auth_key_len; in virtcrypto_probe()
426 vcrypto->max_size = max_size; in virtcrypto_probe()
427 vcrypto->crypto_services = crypto_services; in virtcrypto_probe()
428 vcrypto->cipher_algo_l = cipher_algo_l; in virtcrypto_probe()
429 vcrypto->cipher_algo_h = cipher_algo_h; in virtcrypto_probe()
430 vcrypto->mac_algo_l = mac_algo_l; in virtcrypto_probe()
431 vcrypto->mac_algo_h = mac_algo_h; in virtcrypto_probe()
432 vcrypto->hash_algo = hash_algo; in virtcrypto_probe()
433 vcrypto->aead_algo = aead_algo; in virtcrypto_probe()
434 vcrypto->akcipher_algo = akcipher_algo; in virtcrypto_probe()
438 vcrypto->max_data_queues, in virtcrypto_probe()
439 vcrypto->max_cipher_key_len, in virtcrypto_probe()
440 vcrypto->max_auth_key_len, in virtcrypto_probe()
441 vcrypto->max_size); in virtcrypto_probe()
443 err = virtcrypto_init_vqs(vcrypto); in virtcrypto_probe()
449 err = virtcrypto_start_crypto_engines(vcrypto); in virtcrypto_probe()
455 err = virtcrypto_update_status(vcrypto); in virtcrypto_probe()
459 INIT_WORK(&vcrypto->config_work, vcrypto_config_changed_work); in virtcrypto_probe()
464 virtcrypto_clear_crypto_engines(vcrypto); in virtcrypto_probe()
467 virtcrypto_del_vqs(vcrypto); in virtcrypto_probe()
469 virtcrypto_devmgr_rm_dev(vcrypto); in virtcrypto_probe()
471 kfree(vcrypto); in virtcrypto_probe()
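Note: lines 443-471 give the tail of virtcrypto_probe(): bring up the virtqueues, start the crypto engines, read the initial status, register the config-change worker, and unwind in reverse order on failure. A sketch of that sequence with the earlier allocation, devmgr registration and config reads elided (label names are invented; the real probe likely also marks the device ready and resets it on the error path, neither of which is visible in this listing):

static int probe_tail_sketch(struct virtio_device *vdev,
			     struct virtio_crypto *vcrypto)
{
	int err;

	err = virtcrypto_init_vqs(vcrypto);
	if (err)
		goto free_dev;

	err = virtcrypto_start_crypto_engines(vcrypto);
	if (err)
		goto free_vqs;

	err = virtcrypto_update_status(vcrypto);
	if (err)
		goto free_engines;

	INIT_WORK(&vcrypto->config_work, vcrypto_config_changed_work);
	return 0;

free_engines:
	virtcrypto_clear_crypto_engines(vcrypto);
free_vqs:
	virtcrypto_del_vqs(vcrypto);
free_dev:
	virtcrypto_devmgr_rm_dev(vcrypto);
	kfree(vcrypto);
	return err;
}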
475 static void virtcrypto_free_unused_reqs(struct virtio_crypto *vcrypto) in virtcrypto_free_unused_reqs() argument
481 for (i = 0; i < vcrypto->max_data_queues; i++) { in virtcrypto_free_unused_reqs()
482 vq = vcrypto->data_vq[i].vq; in virtcrypto_free_unused_reqs()
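Note: lines 475-482 start virtcrypto_free_unused_reqs(), which walks every data virtqueue before teardown. The usual pattern is to drain requests the device never consumed with virtqueue_detach_unused_buf(); a sketch (what exactly gets freed per detached request is an assumption here):

static void free_unused_reqs_sketch(struct virtio_crypto *vcrypto)
{
	struct virtqueue *vq;
	void *req;
	int i;

	for (i = 0; i < vcrypto->max_data_queues; i++) {
		vq = vcrypto->data_vq[i].vq;
		/* Detach requests the device never completed and release
		 * whatever state the driver attached to them. */
		while ((req = virtqueue_detach_unused_buf(vq)) != NULL)
			kfree(req);
	}
}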
491 struct virtio_crypto *vcrypto = vdev->priv; in virtcrypto_remove() local
496 flush_work(&vcrypto->config_work); in virtcrypto_remove()
497 if (virtcrypto_dev_started(vcrypto)) in virtcrypto_remove()
498 virtcrypto_dev_stop(vcrypto); in virtcrypto_remove()
499 for (i = 0; i < vcrypto->max_data_queues; i++) in virtcrypto_remove()
500 tasklet_kill(&vcrypto->data_vq[i].done_task); in virtcrypto_remove()
502 virtcrypto_free_unused_reqs(vcrypto); in virtcrypto_remove()
503 virtcrypto_clear_crypto_engines(vcrypto); in virtcrypto_remove()
504 virtcrypto_del_vqs(vcrypto); in virtcrypto_remove()
505 virtcrypto_devmgr_rm_dev(vcrypto); in virtcrypto_remove()
506 kfree(vcrypto); in virtcrypto_remove()
511 struct virtio_crypto *vcrypto = vdev->priv; in virtcrypto_config_changed() local
513 schedule_work(&vcrypto->config_work); in virtcrypto_config_changed()
519 struct virtio_crypto *vcrypto = vdev->priv; in virtcrypto_freeze() local
521 flush_work(&vcrypto->config_work); in virtcrypto_freeze()
523 virtcrypto_free_unused_reqs(vcrypto); in virtcrypto_freeze()
524 if (virtcrypto_dev_started(vcrypto)) in virtcrypto_freeze()
525 virtcrypto_dev_stop(vcrypto); in virtcrypto_freeze()
527 virtcrypto_clear_crypto_engines(vcrypto); in virtcrypto_freeze()
528 virtcrypto_del_vqs(vcrypto); in virtcrypto_freeze()
534 struct virtio_crypto *vcrypto = vdev->priv; in virtcrypto_restore() local
537 err = virtcrypto_init_vqs(vcrypto); in virtcrypto_restore()
541 err = virtcrypto_start_crypto_engines(vcrypto); in virtcrypto_restore()
547 err = virtcrypto_dev_start(vcrypto); in virtcrypto_restore()
556 virtcrypto_clear_crypto_engines(vcrypto); in virtcrypto_restore()
559 virtcrypto_del_vqs(vcrypto); in virtcrypto_restore()
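Note: lines 534-559 show virtcrypto_restore() rebuilding what virtcrypto_freeze() (lines 519-528) tore down: virtqueues first, then crypto engines, then the device itself, with the error path unwinding in reverse. A sketch of that ordering (labels invented; the virtio_device_ready() call is assumed, since resume paths normally need it but it is not visible in this listing):

static int restore_sketch(struct virtio_device *vdev)
{
	struct virtio_crypto *vcrypto = vdev->priv;
	int err;

	err = virtcrypto_init_vqs(vcrypto);
	if (err)
		return err;

	err = virtcrypto_start_crypto_engines(vcrypto);
	if (err)
		goto free_vqs;

	virtio_device_ready(vdev);	/* assumed, see note above */

	err = virtcrypto_dev_start(vcrypto);
	if (err)
		goto free_engines;

	return 0;

free_engines:
	virtcrypto_clear_crypto_engines(vcrypto);
free_vqs:
	virtcrypto_del_vqs(vcrypto);
	return err;
}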