From 2566ea8a058f1d4f6b472f73f9b3c74c5a6be743 Mon Sep 17 00:00:00 2001
From: Wenkai Lin <linwenkai6@hisilicon.com>
Date: Thu, 9 Nov 2023 11:23:38 +0800
Subject: [PATCH 56/63] uadk_engine: support aead aes-gcm algorithm

The OpenSSL EVP interface supports AEAD mode, in which authentication
and encryption/decryption are performed in a single pass. Offload this
computation from software to hardware through the UADK engine.
Currently, aes-128-gcm, aes-192-gcm and aes-256-gcm are supported.

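For reference only (not part of this patch): applications drive the
offloaded GCM ciphers through the standard EVP AEAD call sequence.
A minimal encryption sketch, assuming the uadk_engine has already been
loaded and initialised as "e"; the helper name and buffers below are
placeholders:

  #include <openssl/engine.h>
  #include <openssl/evp.h>

  /* Sketch: encrypt ptlen bytes with aes-256-gcm through the engine.
   * key is 32 bytes, iv is 12 bytes (the only IV length accepted here)
   * and tag receives the 16-byte GCM tag.
   */
  static int gcm_encrypt_demo(ENGINE *e, const unsigned char *key,
                              const unsigned char *iv,
                              const unsigned char *aad, int aadlen,
                              const unsigned char *pt, int ptlen,
                              unsigned char *ct, unsigned char tag[16])
  {
      EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
      int outl, total = 0;

      if (!ctx)
          return -1;

      /* Select aes-256-gcm and route it to the uadk engine. */
      if (!EVP_EncryptInit_ex(ctx, EVP_aes_256_gcm(), e, key, iv))
          goto err;

      /* AAD is passed with a NULL output buffer. */
      if (aadlen && !EVP_EncryptUpdate(ctx, NULL, &outl, aad, aadlen))
          goto err;

      /* Plaintext may be fed in one or more chunks. */
      if (!EVP_EncryptUpdate(ctx, ct, &outl, pt, ptlen))
          goto err;
      total += outl;

      /* The final call completes the stream and makes the tag available. */
      if (!EVP_EncryptFinal_ex(ctx, ct + total, &outl))
          goto err;
      total += outl;

      if (!EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_GET_TAG, 16, tag))
          goto err;

      EVP_CIPHER_CTX_free(ctx);
      return total;
  err:
      EVP_CIPHER_CTX_free(ctx);
      return -1;
  }

Decryption mirrors this flow, except that the expected tag is supplied
with EVP_CTRL_GCM_SET_TAG before EVP_DecryptFinal_ex(), matching the
handling in uadk_e_aes_gcm_set_ctrl().
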
Signed-off-by: Wenkai Lin <linwenkai6@hisilicon.com>
---
src/Makefile.am | 3 +-
src/uadk.h | 2 -
src/uadk_aead.c | 732 ++++++++++++++++++++++++++++++++++++++
src/uadk_async.h | 1 +
src/uadk_cipher.c | 545 ++++++++++------------------
src/uadk_cipher_adapter.c | 204 +++++++++++
src/uadk_cipher_adapter.h | 36 ++
src/uadk_engine_init.c | 24 +-
8 files changed, 1173 insertions(+), 374 deletions(-)
create mode 100644 src/uadk_aead.c
create mode 100644 src/uadk_cipher_adapter.c
create mode 100644 src/uadk_cipher_adapter.h
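Reviewer note (illustrative, not part of the patch): like the existing
cipher and digest knobs, the new environment-variable configuration for
aead is switched on through the ctrl command UADK_CMD_ENABLE_AEAD_ENV
added below. A minimal sketch using the standard ENGINE API; the helper
name is a placeholder:

  #include <openssl/engine.h>

  /* Sketch: enable the "aead" env path before the first GCM operation.
   * When enabled, the engine calls wd_aead_env_init() instead of
   * building its fixed four-context configuration, and the context
   * setup is driven by the WD_AEAD_CTX_NUM environment variable
   * (see uadk_e_wd_aead_cipher_env_init()).
   */
  static int enable_aead_env(ENGINE *e)
  {
      return ENGINE_ctrl_cmd_string(e, "UADK_CMD_ENABLE_AEAD_ENV", "1", 0);
  }
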
diff --git a/src/Makefile.am b/src/Makefile.am
|
|
index 668ba13..3806de3 100644
|
|
--- a/src/Makefile.am
|
|
+++ b/src/Makefile.am
|
|
@@ -11,7 +11,8 @@ endif #HAVE_CRYPTO3
|
|
|
|
uadk_engine_la_SOURCES=uadk_utils.c uadk_engine_init.c uadk_cipher.c \
|
|
uadk_digest.c uadk_async.c uadk_rsa.c uadk_sm2.c \
|
|
- uadk_pkey.c uadk_dh.c uadk_ec.c uadk_ecx.c
|
|
+ uadk_pkey.c uadk_dh.c uadk_ec.c uadk_ecx.c \
|
|
+ uadk_aead.c uadk_cipher_adapter.c
|
|
|
|
uadk_engine_la_LIBADD=-ldl $(WD_LIBS) -lpthread
|
|
uadk_engine_la_LDFLAGS=-module -version-number $(VERSION)
|
|
diff --git a/src/uadk.h b/src/uadk.h
|
|
index 5a98feb..4cf2c13 100644
|
|
--- a/src/uadk.h
|
|
+++ b/src/uadk.h
|
|
@@ -28,8 +28,6 @@ enum {
|
|
HW_V3,
|
|
};
|
|
|
|
-int uadk_e_bind_cipher(ENGINE *e);
|
|
-void uadk_e_destroy_cipher(void);
|
|
int uadk_e_bind_digest(ENGINE *e);
|
|
void uadk_e_destroy_digest(void);
|
|
int uadk_e_bind_rsa(ENGINE *e);
|
|
diff --git a/src/uadk_aead.c b/src/uadk_aead.c
|
|
new file mode 100644
|
|
index 0000000..64a5f5c
|
|
--- /dev/null
|
|
+++ b/src/uadk_aead.c
|
|
@@ -0,0 +1,732 @@
|
|
+/*
|
|
+ * Copyright 2023 Huawei Technologies Co.,Ltd. All rights reserved.
|
|
+ *
|
|
+ * Licensed under the Apache License, Version 2.0 (the "License");
|
|
+ * you may not use this file except in compliance with the License.
|
|
+ * You may obtain a copy of the License at
|
|
+ *
|
|
+ * http://www.apache.org/licenses/LICENSE-2.0
|
|
+ *
|
|
+ * Unless required by applicable law or agreed to in writing, software
|
|
+ * distributed under the License is distributed on an "AS IS" BASIS,
|
|
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
+ * See the License for the specific language governing permissions and
|
|
+ * limitations under the License.
|
|
+ *
|
|
+ */
|
|
+#include <stdio.h>
|
|
+#include <stdbool.h>
|
|
+#include <string.h>
|
|
+#include <dlfcn.h>
|
|
+#include <openssl/aes.h>
|
|
+#include <openssl/engine.h>
|
|
+#include <uadk/wd_aead.h>
|
|
+#include <uadk/wd_sched.h>
|
|
+#include "uadk_cipher_adapter.h"
|
|
+#include "uadk.h"
|
|
+#include "uadk_async.h"
|
|
+#include "uadk_utils.h"
|
|
+
|
|
+#define RET_FAIL -1
|
|
+#define CTX_SYNC_ENC 0
|
|
+#define CTX_SYNC_DEC 1
|
|
+#define CTX_ASYNC_ENC 2
|
|
+#define CTX_ASYNC_DEC 3
|
|
+#define CTX_NUM 4
|
|
+#define AES_GCM_CTR_LEN 4
|
|
+#define AES_GCM_BLOCK_SIZE 16
|
|
+#define AES_GCM_IV_LEN 12
|
|
+#define AES_GCM_TAG_LEN 16
|
|
+#define GCM_FLAG (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_GCM_MODE \
|
|
+ | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_AEAD_CIPHER \
|
|
+ | EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT)
|
|
+/* The max data length is 16M-512B */
|
|
+#define AEAD_BLOCK_SIZE 0xFFFE00
|
|
+
|
|
+struct aead_priv_ctx {
|
|
+ handle_t sess;
|
|
+ struct wd_aead_sess_setup setup;
|
|
+ struct wd_aead_req req;
|
|
+ unsigned char *data;
|
|
+ unsigned char iv[AES_GCM_BLOCK_SIZE];
|
|
+ unsigned char mac[AES_GCM_TAG_LEN];
|
|
+ size_t last_update_bufflen;
|
|
+};
|
|
+
|
|
+struct aead_engine {
|
|
+ struct wd_ctx_config ctx_cfg;
|
|
+ struct wd_sched sched;
|
|
+ int numa_id;
|
|
+ int pid;
|
|
+ pthread_spinlock_t lock;
|
|
+};
|
|
+
|
|
+static struct aead_engine engine;
|
|
+
|
|
+static EVP_CIPHER *uadk_aes_128_gcm;
|
|
+static EVP_CIPHER *uadk_aes_192_gcm;
|
|
+static EVP_CIPHER *uadk_aes_256_gcm;
|
|
+
|
|
+static int uadk_e_aead_env_poll(void *ctx)
|
|
+{
|
|
+ __u64 rx_cnt = 0;
|
|
+ __u32 recv = 0;
|
|
+ /* Poll one packet currently */
|
|
+ int expt = 1;
|
|
+ int ret;
|
|
+
|
|
+ do {
|
|
+ ret = wd_aead_poll(expt, &recv);
|
|
+ if (ret < 0 || recv == expt)
|
|
+ return ret;
|
|
+ rx_cnt++;
|
|
+ } while (rx_cnt < ENGINE_RECV_MAX_CNT);
|
|
+
|
|
+ fprintf(stderr, "failed to poll msg: timeout!\n");
|
|
+
|
|
+ return -ETIMEDOUT;
|
|
+}
|
|
+
|
|
+static int uadk_e_aead_poll(void *ctx)
|
|
+{
|
|
+ struct aead_priv_ctx *priv = (struct aead_priv_ctx *) ctx;
|
|
+ __u64 rx_cnt = 0;
|
|
+ __u32 recv = 0;
|
|
+ /* Poll one packet currently */
|
|
+ int expt = 1;
|
|
+ int ret, idx;
|
|
+
|
|
+ if (priv->req.op_type == WD_CIPHER_ENCRYPTION_DIGEST)
|
|
+ idx = CTX_ASYNC_ENC;
|
|
+ else
|
|
+ idx = CTX_ASYNC_DEC;
|
|
+
|
|
+ do {
|
|
+ ret = wd_aead_poll_ctx(idx, expt, &recv);
|
|
+ if (!ret && recv == expt)
|
|
+ return 0;
|
|
+ else if (ret == -EAGAIN)
|
|
+ rx_cnt++;
|
|
+ else
|
|
+ return RET_FAIL;
|
|
+ } while (rx_cnt < ENGINE_RECV_MAX_CNT);
|
|
+
|
|
+ fprintf(stderr, "failed to recv msg: timeout!\n");
|
|
+
|
|
+ return -ETIMEDOUT;
|
|
+}
|
|
+
|
|
+static handle_t sched_single_aead_init(handle_t h_sched_ctx, void *sched_param)
|
|
+{
|
|
+ struct sched_params *param = (struct sched_params *)sched_param;
|
|
+ struct sched_params *skey;
|
|
+
|
|
+ skey = malloc(sizeof(struct sched_params));
|
|
+ if (!skey) {
|
|
+ fprintf(stderr, "fail to alloc aead sched key!\n");
|
|
+ return (handle_t)0;
|
|
+ }
|
|
+
|
|
+ skey->numa_id = param->numa_id;
|
|
+ skey->type = param->type;
|
|
+
|
|
+ return (handle_t)skey;
|
|
+}
|
|
+
|
|
+static __u32 sched_single_pick_next_ctx(handle_t sched_ctx, void *sched_key, const int sched_mode)
|
|
+{
|
|
+ struct sched_params *key = (struct sched_params *)sched_key;
|
|
+
|
|
+ if (sched_mode) {
|
|
+ if (key->type == WD_CIPHER_ENCRYPTION_DIGEST)
|
|
+ return CTX_ASYNC_ENC;
|
|
+ else
|
|
+ return CTX_ASYNC_DEC;
|
|
+ } else {
|
|
+ if (key->type == WD_CIPHER_ENCRYPTION_DIGEST)
|
|
+ return CTX_SYNC_ENC;
|
|
+ else
|
|
+ return CTX_SYNC_DEC;
|
|
+ }
|
|
+}
|
|
+
|
|
+static int sched_single_poll_policy(handle_t h_sched_ctx, __u32 expect, __u32 *count)
|
|
+{
|
|
+ return 0;
|
|
+}
|
|
+
|
|
+static int uadk_e_wd_aead_cipher_env_init(struct uacce_dev *dev)
|
|
+{
|
|
+ int ret;
|
|
+
|
|
+ ret = uadk_e_set_env("WD_AEAD_CTX_NUM", dev->numa_id);
|
|
+ if (ret)
|
|
+ return ret;
|
|
+
|
|
+ ret = wd_aead_env_init(NULL);
|
|
+
|
|
+ async_register_poll_fn(ASYNC_TASK_AEAD, uadk_e_aead_env_poll);
|
|
+
|
|
+ return ret;
|
|
+}
|
|
+
|
|
+static int uadk_e_wd_aead_cipher_init(struct uacce_dev *dev)
|
|
+{
|
|
+ __u32 i, j;
|
|
+ int ret;
|
|
+
|
|
+ engine.numa_id = dev->numa_id;
|
|
+
|
|
+ ret = uadk_e_is_env_enabled("aead");
|
|
+ if (ret)
|
|
+ return uadk_e_wd_aead_cipher_env_init(dev);
|
|
+
|
|
+ memset(&engine.ctx_cfg, 0, sizeof(struct wd_ctx_config));
|
|
+ engine.ctx_cfg.ctx_num = CTX_NUM;
|
|
+ engine.ctx_cfg.ctxs = calloc(CTX_NUM, sizeof(struct wd_ctx));
|
|
+ if (!engine.ctx_cfg.ctxs)
|
|
+ return -ENOMEM;
|
|
+
|
|
+ for (i = 0; i < CTX_NUM; i++) {
|
|
+ engine.ctx_cfg.ctxs[i].ctx = wd_request_ctx(dev);
|
|
+ if (!engine.ctx_cfg.ctxs[i].ctx) {
|
|
+ ret = -ENOMEM;
|
|
+ goto err_freectx;
|
|
+ }
|
|
+ }
|
|
+
|
|
+ engine.ctx_cfg.ctxs[CTX_SYNC_ENC].op_type = CTX_TYPE_ENCRYPT;
|
|
+ engine.ctx_cfg.ctxs[CTX_SYNC_DEC].op_type = CTX_TYPE_DECRYPT;
|
|
+ engine.ctx_cfg.ctxs[CTX_SYNC_ENC].ctx_mode = CTX_MODE_SYNC;
|
|
+ engine.ctx_cfg.ctxs[CTX_SYNC_DEC].ctx_mode = CTX_MODE_SYNC;
|
|
+
|
|
+ engine.ctx_cfg.ctxs[CTX_ASYNC_ENC].op_type = CTX_TYPE_ENCRYPT;
|
|
+ engine.ctx_cfg.ctxs[CTX_ASYNC_DEC].op_type = CTX_TYPE_DECRYPT;
|
|
+ engine.ctx_cfg.ctxs[CTX_ASYNC_ENC].ctx_mode = CTX_MODE_ASYNC;
|
|
+ engine.ctx_cfg.ctxs[CTX_ASYNC_DEC].ctx_mode = CTX_MODE_ASYNC;
|
|
+
|
|
+ engine.sched.name = "sched_single";
|
|
+ engine.sched.pick_next_ctx = sched_single_pick_next_ctx;
|
|
+ engine.sched.poll_policy = sched_single_poll_policy;
|
|
+ engine.sched.sched_init = sched_single_aead_init;
|
|
+
|
|
+ ret = wd_aead_init(&engine.ctx_cfg, &engine.sched);
|
|
+ if (ret)
|
|
+ goto err_freectx;
|
|
+
|
|
+ async_register_poll_fn(ASYNC_TASK_AEAD, uadk_e_aead_poll);
|
|
+ return ret;
|
|
+
|
|
+err_freectx:
|
|
+ for (j = 0; j < i; j++)
|
|
+ wd_release_ctx(engine.ctx_cfg.ctxs[j].ctx);
|
|
+
|
|
+ free(engine.ctx_cfg.ctxs);
|
|
+
|
|
+ return ret;
|
|
+}
|
|
+
|
|
+static int uadk_e_init_aead_cipher(void)
|
|
+{
|
|
+ struct uacce_dev *dev;
|
|
+ int ret;
|
|
+
|
|
+ if (engine.pid != getpid()) {
|
|
+ pthread_spin_lock(&engine.lock);
|
|
+ if (engine.pid == getpid()) {
|
|
+ pthread_spin_unlock(&engine.lock);
|
|
+ return 1;
|
|
+ }
|
|
+
|
|
+ dev = wd_get_accel_dev("aead");
|
|
+ if (!dev) {
|
|
+ pthread_spin_unlock(&engine.lock);
|
|
+ fprintf(stderr, "failed to get device for aead.\n");
|
|
+ return 0;
|
|
+ }
|
|
+
|
|
+ ret = uadk_e_wd_aead_cipher_init(dev);
|
|
+ if (ret < 0) {
|
|
+ pthread_spin_unlock(&engine.lock);
|
|
+ fprintf(stderr, "failed to initiate aead cipher.\n");
|
|
+ free(dev);
|
|
+ return 0;
|
|
+ }
|
|
+
|
|
+ engine.pid = getpid();
|
|
+ pthread_spin_unlock(&engine.lock);
|
|
+ free(dev);
|
|
+ }
|
|
+
|
|
+ return 1;
|
|
+}
|
|
+
|
|
+static int uadk_e_ctx_init(struct aead_priv_ctx *priv, const unsigned char *ckey, int ckey_len)
|
|
+{
|
|
+ struct sched_params params = {0};
|
|
+ int ret;
|
|
+
|
|
+ ret = uadk_e_init_aead_cipher();
|
|
+ if (unlikely(!ret)) {
|
|
+ fprintf(stderr, "uadk failed to init aead HW!\n");
|
|
+ return 0;
|
|
+ }
|
|
+
|
|
+ params.type = priv->req.op_type;
|
|
+ ret = uadk_e_is_env_enabled("aead");
|
|
+ if (ret)
|
|
+ params.type = 0;
|
|
+
|
|
+ params.numa_id = engine.numa_id;
|
|
+ priv->setup.sched_param = ¶ms;
|
|
+ if (!priv->sess) {
|
|
+ priv->sess = wd_aead_alloc_sess(&priv->setup);
|
|
+ if (!priv->sess) {
|
|
+ fprintf(stderr, "uadk engine failed to alloc aead session!\n");
|
|
+ return 0;
|
|
+ }
|
|
+ ret = wd_aead_set_authsize(priv->sess, AES_GCM_TAG_LEN);
|
|
+ if (ret < 0) {
|
|
+ fprintf(stderr, "uadk engine failed to set authsize!\n");
|
|
+ goto out;
|
|
+ }
|
|
+
|
|
+ ret = wd_aead_set_ckey(priv->sess, ckey, ckey_len);
|
|
+ if (ret) {
|
|
+ fprintf(stderr, "uadk engine failed to set ckey!\n");
|
|
+ goto out;
|
|
+ }
|
|
+ priv->data = malloc(AEAD_BLOCK_SIZE << 1);
|
|
+ if (unlikely(!priv->data)) {
|
|
+ fprintf(stderr, "uadk engine failed to alloc data!\n");
|
|
+ goto out;
|
|
+ }
|
|
+ }
|
|
+
|
|
+ return 1;
|
|
+out:
|
|
+ wd_aead_free_sess(priv->sess);
|
|
+ priv->sess = 0;
|
|
+ return 0;
|
|
+}
|
|
+
|
|
+static int uadk_e_aes_gcm_init(EVP_CIPHER_CTX *ctx, const unsigned char *ckey,
|
|
+ const unsigned char *iv, int enc)
|
|
+{
|
|
+ struct aead_priv_ctx *priv =
|
|
+ (struct aead_priv_ctx *)EVP_CIPHER_CTX_get_cipher_data(ctx);
|
|
+ int ret, ckey_len;
|
|
+
|
|
+ if (unlikely(!ckey))
|
|
+ return 1;
|
|
+
|
|
+ if (iv)
|
|
+ memcpy(priv->iv, iv, AES_GCM_IV_LEN);
|
|
+
|
|
+ priv->setup.calg = WD_CIPHER_AES;
|
|
+ priv->setup.cmode = WD_CIPHER_GCM;
|
|
+ priv->setup.dalg = 0;
|
|
+ priv->setup.dmode = 0;
|
|
+
|
|
+ priv->last_update_bufflen = 0;
|
|
+ priv->req.assoc_bytes = 0;
|
|
+ priv->req.out_bytes = 0;
|
|
+ priv->req.data_fmt = WD_FLAT_BUF;
|
|
+
|
|
+ priv->req.iv = priv->iv;
|
|
+ priv->req.iv_bytes = AES_GCM_IV_LEN;
|
|
+ memset(priv->iv + AES_GCM_IV_LEN, 0, AES_GCM_CTR_LEN);
|
|
+
|
|
+ priv->req.mac = priv->mac;
|
|
+ priv->req.mac_bytes = AES_GCM_TAG_LEN;
|
|
+
|
|
+ if (enc)
|
|
+ priv->req.op_type = WD_CIPHER_ENCRYPTION_DIGEST;
|
|
+ else
|
|
+ priv->req.op_type = WD_CIPHER_DECRYPTION_DIGEST;
|
|
+
|
|
+ ckey_len = EVP_CIPHER_CTX_key_length(ctx);
|
|
+ ret = uadk_e_ctx_init(priv, ckey, ckey_len);
|
|
+ if (!ret)
|
|
+ return 0;
|
|
+
|
|
+ return 1;
|
|
+}
|
|
+
|
|
+static int uadk_e_aes_gcm_cleanup(EVP_CIPHER_CTX *ctx)
|
|
+{
|
|
+ struct aead_priv_ctx *priv =
|
|
+ (struct aead_priv_ctx *)EVP_CIPHER_CTX_get_cipher_data(ctx);
|
|
+
|
|
+ if (priv->sess) {
|
|
+ wd_aead_free_sess(priv->sess);
|
|
+ priv->sess = 0;
|
|
+ }
|
|
+
|
|
+ if (priv->data) {
|
|
+ free(priv->data);
|
|
+ priv->data = NULL;
|
|
+ }
|
|
+
|
|
+ return 1;
|
|
+}
|
|
+
|
|
+static int uadk_e_aes_gcm_set_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg, void *ptr)
|
|
+{
|
|
+ struct aead_priv_ctx *priv =
|
|
+ (struct aead_priv_ctx *)EVP_CIPHER_CTX_get_cipher_data(ctx);
|
|
+ int enc = EVP_CIPHER_CTX_encrypting(ctx);
|
|
+
|
|
+ switch (type) {
|
|
+ case EVP_CTRL_INIT:
|
|
+ priv->req.iv_bytes = 0;
|
|
+ return 1;
|
|
+ case EVP_CTRL_GET_IVLEN:
|
|
+ *(int *)ptr = priv->req.iv_bytes;
|
|
+ return 1;
|
|
+ case EVP_CTRL_GCM_SET_IVLEN:
|
|
+ if (arg != AES_GCM_IV_LEN) {
|
|
+ fprintf(stderr, "gcm only support 12 bytes.\n");
|
|
+ return 0;
|
|
+ }
|
|
+ return 1;
|
|
+ case EVP_CTRL_GCM_GET_TAG:
|
|
+ if (arg <= 0 || arg > AES_GCM_TAG_LEN) {
|
|
+ fprintf(stderr, "TAG length invalid.\n");
|
|
+ return 0;
|
|
+ }
|
|
+
|
|
+ if (EVP_CIPHER_CTX_buf_noconst(ctx) == NULL || ptr == NULL) {
|
|
+ fprintf(stderr, "ctx memory pointer is invalid.\n");
|
|
+ return 0;
|
|
+ }
|
|
+
|
|
+ memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(ctx), arg);
|
|
+ return 1;
|
|
+ case EVP_CTRL_GCM_SET_TAG:
|
|
+ if (arg != AES_GCM_TAG_LEN || enc) {
|
|
+ fprintf(stderr, "cannot set tag when encrypt or arg is invalid.\n");
|
|
+ return 0;
|
|
+ }
|
|
+
|
|
+ if (EVP_CIPHER_CTX_buf_noconst(ctx) == NULL || ptr == NULL) {
|
|
+ fprintf(stderr, "ctx memory pointer is invalid.\n");
|
|
+ return 0;
|
|
+ }
|
|
+
|
|
+ memcpy(EVP_CIPHER_CTX_buf_noconst(ctx), ptr, AES_GCM_TAG_LEN);
|
|
+ return 1;
|
|
+ default:
|
|
+ fprintf(stderr, "unsupported ctrl type: %d\n", type);
|
|
+ return 0;
|
|
+ }
|
|
+}
|
|
+
|
|
+static int uadk_e_do_aes_gcm_first(EVP_CIPHER_CTX *ctx, unsigned char *out,
|
|
+ const unsigned char *in, size_t inlen)
|
|
+{
|
|
+ struct aead_priv_ctx *priv =
|
|
+ (struct aead_priv_ctx *)EVP_CIPHER_CTX_get_cipher_data(ctx);
|
|
+ int ret;
|
|
+
|
|
+ priv->req.assoc_bytes = inlen;
|
|
+
|
|
+ if (ASYNC_get_current_job()) {
|
|
+ memcpy(priv->data + priv->last_update_bufflen, in, inlen);
|
|
+ priv->last_update_bufflen += inlen;
|
|
+ return 1;
|
|
+ }
|
|
+
|
|
+ priv->req.src = (unsigned char *)in;
|
|
+ priv->req.msg_state = AEAD_MSG_FIRST;
|
|
+
|
|
+ ret = wd_do_aead_sync(priv->sess, &priv->req);
|
|
+ if (ret < 0) {
|
|
+ fprintf(stderr, "do sec aead first operation failed, ret:%d!\n", ret);
|
|
+ return RET_FAIL;
|
|
+ }
|
|
+
|
|
+ return 1;
|
|
+}
|
|
+
|
|
+static int uadk_e_hw_update(struct aead_priv_ctx *priv, unsigned char *out,
|
|
+ unsigned char *in, size_t inlen)
|
|
+{
|
|
+ int ret;
|
|
+
|
|
+ priv->req.src = in;
|
|
+ priv->req.dst = out;
|
|
+ priv->req.in_bytes = inlen;
|
|
+ priv->req.msg_state = AEAD_MSG_MIDDLE;
|
|
+ ret = wd_do_aead_sync(priv->sess, &priv->req);
|
|
+ if (ret < 0) {
|
|
+ fprintf(stderr, "do sec aead update operation failed, ret:%d!\n", ret);
|
|
+ return RET_FAIL;
|
|
+ }
|
|
+
|
|
+ return 0;
|
|
+}
|
|
+
|
|
+static int uadk_e_cache_data(struct aead_priv_ctx *priv, const unsigned char *in, size_t inlen)
|
|
+{
|
|
+ if (ASYNC_get_current_job() || !priv->req.assoc_bytes) {
|
|
+ if (priv->last_update_bufflen + inlen > AEAD_BLOCK_SIZE) {
|
|
+ fprintf(stderr, "aead input data length is too long!\n");
|
|
+ return RET_FAIL;
|
|
+ }
|
|
+ memcpy(priv->data + priv->last_update_bufflen, in, inlen);
|
|
+ priv->last_update_bufflen += inlen;
|
|
+ return 0;
|
|
+ }
|
|
+
|
|
+ return 1;
|
|
+}
|
|
+
|
|
+static int uadk_e_do_aes_gcm_update(EVP_CIPHER_CTX *ctx, unsigned char *out,
|
|
+ const unsigned char *in, size_t inlen)
|
|
+{
|
|
+ struct aead_priv_ctx *priv =
|
|
+ (struct aead_priv_ctx *)EVP_CIPHER_CTX_get_cipher_data(ctx);
|
|
+ int ret;
|
|
+
|
|
+ ret = uadk_e_cache_data(priv, in, inlen);
|
|
+ if (ret <= 0)
|
|
+ return ret;
|
|
+
|
|
+ ret = uadk_e_hw_update(priv, out, in, inlen);
|
|
+ if (ret < 0)
|
|
+ return RET_FAIL;
|
|
+
|
|
+ return inlen;
|
|
+}
|
|
+
|
|
+static void *uadk_e_aead_cb(struct wd_aead_req *req, void *data)
|
|
+{
|
|
+ struct uadk_e_cb_info *cb_param;
|
|
+ struct async_op *op;
|
|
+
|
|
+ if (!req)
|
|
+ return NULL;
|
|
+
|
|
+ cb_param = req->cb_param;
|
|
+ if (!cb_param)
|
|
+ return NULL;
|
|
+
|
|
+ op = cb_param->op;
|
|
+ if (op && op->job && !op->done) {
|
|
+ op->done = 1;
|
|
+ async_free_poll_task(op->idx, 1);
|
|
+ async_wake_job(op->job);
|
|
+ }
|
|
+
|
|
+ return NULL;
|
|
+}
|
|
+
|
|
+static int do_aead_async(struct aead_priv_ctx *priv, struct async_op *op)
|
|
+{
|
|
+ struct uadk_e_cb_info *cb_param;
|
|
+ int ret = 0;
|
|
+ int idx;
|
|
+
|
|
+ priv->req.in_bytes = priv->last_update_bufflen - priv->req.assoc_bytes;
|
|
+ priv->req.dst = priv->data + AEAD_BLOCK_SIZE;
|
|
+
|
|
+ cb_param = malloc(sizeof(struct uadk_e_cb_info));
|
|
+ if (!cb_param) {
|
|
+ fprintf(stderr, "failed to alloc cb_param.\n");
|
|
+ return ret;
|
|
+ }
|
|
+
|
|
+ cb_param->op = op;
|
|
+ cb_param->priv = priv;
|
|
+ priv->req.cb = uadk_e_aead_cb;
|
|
+ priv->req.cb_param = cb_param;
|
|
+
|
|
+ ret = async_get_free_task(&idx);
|
|
+ if (!ret)
|
|
+ goto free_cb_param;
|
|
+
|
|
+ op->idx = idx;
|
|
+ do {
|
|
+ ret = wd_do_aead_async(priv->sess, &priv->req);
|
|
+ if (ret < 0 && ret != -EBUSY) {
|
|
+ fprintf(stderr, "do sec aead async failed.\n");
|
|
+ async_free_poll_task(op->idx, 0);
|
|
+ ret = 0;
|
|
+ goto free_cb_param;
|
|
+ }
|
|
+ } while (ret == -EBUSY);
|
|
+
|
|
+ ret = async_pause_job(priv, op, ASYNC_TASK_AEAD);
|
|
+
|
|
+free_cb_param:
|
|
+ free(cb_param);
|
|
+ return ret;
|
|
+}
|
|
+
|
|
+static int uadk_e_do_aes_gcm_final(EVP_CIPHER_CTX *ctx, unsigned char *out,
|
|
+ const unsigned char *in, size_t inlen)
|
|
+{
|
|
+ struct aead_priv_ctx *priv =
|
|
+ (struct aead_priv_ctx *)EVP_CIPHER_CTX_get_cipher_data(ctx);
|
|
+ unsigned char *ctx_buf = EVP_CIPHER_CTX_buf_noconst(ctx);
|
|
+ struct async_op *op;
|
|
+ int ret, enc;
|
|
+
|
|
+ op = malloc(sizeof(struct async_op));
|
|
+ if (!op)
|
|
+ return RET_FAIL;
|
|
+
|
|
+ ret = async_setup_async_event_notification(op);
|
|
+ if (unlikely(!ret)) {
|
|
+ fprintf(stderr, "failed to setup async event notification.\n");
|
|
+ free(op);
|
|
+ return RET_FAIL;
|
|
+ }
|
|
+
|
|
+ if (priv->req.assoc_bytes && !op->job)
|
|
+ priv->req.msg_state = AEAD_MSG_END;
|
|
+ else
|
|
+ priv->req.msg_state = AEAD_MSG_BLOCK;
|
|
+
|
|
+ enc = EVP_CIPHER_CTX_encrypting(ctx);
|
|
+ if (!enc)
|
|
+ memcpy(priv->req.mac, ctx_buf, AES_GCM_TAG_LEN);
|
|
+
|
|
+ priv->req.src = priv->data;
|
|
+ if (!op->job) {
|
|
+ priv->req.in_bytes = priv->last_update_bufflen;
|
|
+ priv->req.dst = out;
|
|
+ ret = wd_do_aead_sync(priv->sess, &priv->req);
|
|
+ if (ret < 0) {
|
|
+ fprintf(stderr, "do sec aead final operation failed, ret: %d!\n", ret);
|
|
+ goto out;
|
|
+ }
|
|
+ } else {
|
|
+ ret = do_aead_async(priv, op);
|
|
+ if (!ret)
|
|
+ goto out;
|
|
+
|
|
+ memcpy(out, priv->req.dst + priv->req.assoc_bytes, priv->req.in_bytes);
|
|
+ }
|
|
+
|
|
+ if (enc)
|
|
+ memcpy(ctx_buf, priv->req.mac, AES_GCM_TAG_LEN);
|
|
+
|
|
+ priv->last_update_bufflen = 0;
|
|
+
|
|
+ free(op);
|
|
+ return priv->req.in_bytes;
|
|
+
|
|
+out:
|
|
+ (void)async_clear_async_event_notification();
|
|
+ free(op);
|
|
+ return RET_FAIL;
|
|
+}
|
|
+
|
|
+static int uadk_e_do_aes_gcm(EVP_CIPHER_CTX *ctx, unsigned char *out,
|
|
+ const unsigned char *in, size_t inlen)
|
|
+{
|
|
+ int ret;
|
|
+
|
|
+ if (in) {
|
|
+ if (out == NULL)
|
|
+ return uadk_e_do_aes_gcm_first(ctx, out, in, inlen);
|
|
+
|
|
+ return uadk_e_do_aes_gcm_update(ctx, out, in, inlen);
|
|
+ }
|
|
+
|
|
+ return uadk_e_do_aes_gcm_final(ctx, out, NULL, 0);
|
|
+}
|
|
+
|
|
+#define UADK_AEAD_DESCR(name, block_size, key_size, iv_len, flags, ctx_size, \
|
|
+ init, cipher, cleanup, set_params, get_params, ctrl) \
|
|
+do {\
|
|
+ uadk_##name = EVP_CIPHER_meth_new(NID_##name, block_size, key_size); \
|
|
+ if (uadk_##name == 0 || \
|
|
+ !EVP_CIPHER_meth_set_iv_length(uadk_##name, iv_len) || \
|
|
+ !EVP_CIPHER_meth_set_flags(uadk_##name, flags) || \
|
|
+ !EVP_CIPHER_meth_set_impl_ctx_size(uadk_##name, ctx_size) || \
|
|
+ !EVP_CIPHER_meth_set_init(uadk_##name, init) || \
|
|
+ !EVP_CIPHER_meth_set_do_cipher(uadk_##name, cipher) || \
|
|
+ !EVP_CIPHER_meth_set_cleanup(uadk_##name, cleanup) || \
|
|
+ !EVP_CIPHER_meth_set_set_asn1_params(uadk_##name, set_params) || \
|
|
+ !EVP_CIPHER_meth_set_get_asn1_params(uadk_##name, get_params) || \
|
|
+ !EVP_CIPHER_meth_set_ctrl(uadk_##name, ctrl)) \
|
|
+ return 0;\
|
|
+} while (0)
|
|
+
|
|
+EVP_CIPHER *uadk_create_gcm_cipher_meth(int nid)
|
|
+{
|
|
+ EVP_CIPHER *aead = NULL;
|
|
+
|
|
+ switch (nid) {
|
|
+ case NID_aes_128_gcm:
|
|
+ UADK_AEAD_DESCR(aes_128_gcm, AES_GCM_BLOCK_SIZE, 16, AES_GCM_IV_LEN,
|
|
+ GCM_FLAG, sizeof(struct aead_priv_ctx),
|
|
+ uadk_e_aes_gcm_init, uadk_e_do_aes_gcm, uadk_e_aes_gcm_cleanup,
|
|
+ (EVP_CIPH_FLAG_DEFAULT_ASN1 ? NULL : EVP_CIPHER_set_asn1_iv),
|
|
+ (EVP_CIPH_FLAG_DEFAULT_ASN1 ? NULL : EVP_CIPHER_get_asn1_iv),
|
|
+ uadk_e_aes_gcm_set_ctrl);
|
|
+ aead = uadk_aes_128_gcm;
|
|
+ break;
|
|
+ case NID_aes_192_gcm:
|
|
+ UADK_AEAD_DESCR(aes_192_gcm, AES_GCM_BLOCK_SIZE, 24, AES_GCM_IV_LEN,
|
|
+ GCM_FLAG, sizeof(struct aead_priv_ctx),
|
|
+ uadk_e_aes_gcm_init, uadk_e_do_aes_gcm, uadk_e_aes_gcm_cleanup,
|
|
+ (EVP_CIPH_FLAG_DEFAULT_ASN1 ? NULL : EVP_CIPHER_set_asn1_iv),
|
|
+ (EVP_CIPH_FLAG_DEFAULT_ASN1 ? NULL : EVP_CIPHER_get_asn1_iv),
|
|
+ uadk_e_aes_gcm_set_ctrl);
|
|
+ aead = uadk_aes_192_gcm;
|
|
+ break;
|
|
+ case NID_aes_256_gcm:
|
|
+ UADK_AEAD_DESCR(aes_256_gcm, AES_GCM_BLOCK_SIZE, 32, AES_GCM_IV_LEN,
|
|
+ GCM_FLAG, sizeof(struct aead_priv_ctx),
|
|
+ uadk_e_aes_gcm_init, uadk_e_do_aes_gcm, uadk_e_aes_gcm_cleanup,
|
|
+ (EVP_CIPH_FLAG_DEFAULT_ASN1 ? NULL : EVP_CIPHER_set_asn1_iv),
|
|
+ (EVP_CIPH_FLAG_DEFAULT_ASN1 ? NULL : EVP_CIPHER_get_asn1_iv),
|
|
+ uadk_e_aes_gcm_set_ctrl);
|
|
+ aead = uadk_aes_256_gcm;
|
|
+ break;
|
|
+ default:
|
|
+ aead = NULL;
|
|
+ break;
|
|
+ }
|
|
+
|
|
+ return aead;
|
|
+}
|
|
+
|
|
+static void destroy_aead(struct engine_cipher_info *info, int num)
|
|
+{
|
|
+ __u32 i;
|
|
+
|
|
+ for (i = 0; i < num; i++) {
|
|
+ if (info[i].cipher != NULL) {
|
|
+ EVP_CIPHER_meth_free(info[i].cipher);
|
|
+ info[i].cipher = NULL;
|
|
+ }
|
|
+ }
|
|
+}
|
|
+
|
|
+void uadk_e_destroy_aead(struct engine_cipher_info *info, int num)
|
|
+{
|
|
+ __u32 i;
|
|
+ int ret;
|
|
+
|
|
+ if (engine.pid == getpid()) {
|
|
+ ret = uadk_e_is_env_enabled("aead");
|
|
+ if (ret) {
|
|
+ wd_aead_env_uninit();
|
|
+ } else {
|
|
+ wd_aead_uninit();
|
|
+ for (i = 0; i < engine.ctx_cfg.ctx_num; i++)
|
|
+ wd_release_ctx(engine.ctx_cfg.ctxs[i].ctx);
|
|
+
|
|
+ free(engine.ctx_cfg.ctxs);
|
|
+ }
|
|
+ engine.pid = 0;
|
|
+ }
|
|
+
|
|
+ pthread_spin_destroy(&engine.lock);
|
|
+ destroy_aead(info, num);
|
|
+}
|
|
diff --git a/src/uadk_async.h b/src/uadk_async.h
|
|
index 1208c30..678e392 100644
|
|
--- a/src/uadk_async.h
|
|
+++ b/src/uadk_async.h
|
|
@@ -41,6 +41,7 @@ typedef int (*async_recv_t)(void *ctx);
|
|
enum task_type {
|
|
ASYNC_TASK_CIPHER,
|
|
ASYNC_TASK_DIGEST,
|
|
+ ASYNC_TASK_AEAD,
|
|
ASYNC_TASK_RSA,
|
|
ASYNC_TASK_DH,
|
|
ASYNC_TASK_ECC,
|
|
diff --git a/src/uadk_cipher.c b/src/uadk_cipher.c
|
|
index ed25787..73be09d 100644
|
|
--- a/src/uadk_cipher.c
|
|
+++ b/src/uadk_cipher.c
|
|
@@ -25,6 +25,7 @@
|
|
#include <uadk/wd_sched.h>
|
|
#include "uadk.h"
|
|
#include "uadk_async.h"
|
|
+#include "uadk_cipher_adapter.h"
|
|
|
|
#define UADK_DO_SOFT (-0xE0)
|
|
#define CTX_SYNC_ENC 0
|
|
@@ -74,54 +75,8 @@ struct cipher_info {
|
|
__u32 out_bytes;
|
|
};
|
|
|
|
-static int platform;
|
|
-
|
|
#define SMALL_PACKET_OFFLOAD_THRESHOLD_DEFAULT 192
|
|
|
|
-static int cipher_hw_v2_nids[] = {
|
|
- NID_aes_128_cbc,
|
|
- NID_aes_192_cbc,
|
|
- NID_aes_256_cbc,
|
|
- NID_aes_128_ecb,
|
|
- NID_aes_192_ecb,
|
|
- NID_aes_256_ecb,
|
|
- NID_aes_128_xts,
|
|
- NID_aes_256_xts,
|
|
- NID_sm4_cbc,
|
|
- NID_des_ede3_cbc,
|
|
- NID_des_ede3_ecb,
|
|
- NID_sm4_ecb,
|
|
- 0,
|
|
-};
|
|
-
|
|
-static int cipher_hw_v3_nids[] = {
|
|
- NID_aes_128_cbc,
|
|
- NID_aes_192_cbc,
|
|
- NID_aes_256_cbc,
|
|
- NID_aes_128_ctr,
|
|
- NID_aes_192_ctr,
|
|
- NID_aes_256_ctr,
|
|
- NID_aes_128_ecb,
|
|
- NID_aes_192_ecb,
|
|
- NID_aes_256_ecb,
|
|
- NID_aes_128_xts,
|
|
- NID_aes_256_xts,
|
|
- NID_sm4_cbc,
|
|
- NID_sm4_ecb,
|
|
- NID_des_ede3_cbc,
|
|
- NID_des_ede3_ecb,
|
|
- NID_aes_128_cfb128,
|
|
- NID_aes_192_cfb128,
|
|
- NID_aes_256_cfb128,
|
|
- NID_aes_128_ofb128,
|
|
- NID_aes_192_ofb128,
|
|
- NID_aes_256_ofb128,
|
|
- NID_sm4_cfb128,
|
|
- NID_sm4_ofb128,
|
|
- NID_sm4_ctr,
|
|
- 0,
|
|
-};
|
|
-
|
|
static EVP_CIPHER *uadk_aes_128_cbc;
|
|
static EVP_CIPHER *uadk_aes_192_cbc;
|
|
static EVP_CIPHER *uadk_aes_256_cbc;
|
|
@@ -328,130 +283,6 @@ static void uadk_e_cipher_sw_cleanup(EVP_CIPHER_CTX *ctx)
|
|
}
|
|
}
|
|
|
|
-static int uadk_get_accel_platform(char *alg_name)
|
|
-{
|
|
- struct uacce_dev *dev;
|
|
-
|
|
- dev = wd_get_accel_dev(alg_name);
|
|
- if (dev == NULL)
|
|
- return 0;
|
|
-
|
|
- if (!strcmp(dev->api, "hisi_qm_v2"))
|
|
- platform = HW_V2;
|
|
- else
|
|
- platform = HW_V3;
|
|
- free(dev);
|
|
-
|
|
- return 1;
|
|
-}
|
|
-
|
|
-static int uadk_e_engine_ciphers(ENGINE *e, const EVP_CIPHER **cipher,
|
|
- const int **nids, int nid)
|
|
-{
|
|
- int *cipher_nids;
|
|
- __u32 size, i;
|
|
- int ret = 1;
|
|
-
|
|
- if (platform == HW_V2) {
|
|
- size = (sizeof(cipher_hw_v2_nids) - 1) / sizeof(int);
|
|
- cipher_nids = cipher_hw_v2_nids;
|
|
- } else {
|
|
- size = (sizeof(cipher_hw_v3_nids) - 1) / sizeof(int);
|
|
- cipher_nids = cipher_hw_v3_nids;
|
|
- }
|
|
-
|
|
- if (!cipher) {
|
|
- *nids = cipher_nids;
|
|
- return size;
|
|
- }
|
|
-
|
|
- for (i = 0; i < size; i++) {
|
|
- if (nid == cipher_nids[i])
|
|
- break;
|
|
- }
|
|
-
|
|
- switch (nid) {
|
|
- case NID_aes_128_cbc:
|
|
- *cipher = uadk_aes_128_cbc;
|
|
- break;
|
|
- case NID_aes_192_cbc:
|
|
- *cipher = uadk_aes_192_cbc;
|
|
- break;
|
|
- case NID_aes_256_cbc:
|
|
- *cipher = uadk_aes_256_cbc;
|
|
- break;
|
|
- case NID_aes_128_ctr:
|
|
- *cipher = uadk_aes_128_ctr;
|
|
- break;
|
|
- case NID_aes_192_ctr:
|
|
- *cipher = uadk_aes_192_ctr;
|
|
- break;
|
|
- case NID_aes_256_ctr:
|
|
- *cipher = uadk_aes_256_ctr;
|
|
- break;
|
|
- case NID_aes_128_ecb:
|
|
- *cipher = uadk_aes_128_ecb;
|
|
- break;
|
|
- case NID_aes_192_ecb:
|
|
- *cipher = uadk_aes_192_ecb;
|
|
- break;
|
|
- case NID_aes_256_ecb:
|
|
- *cipher = uadk_aes_256_ecb;
|
|
- break;
|
|
- case NID_aes_128_xts:
|
|
- *cipher = uadk_aes_128_xts;
|
|
- break;
|
|
- case NID_aes_256_xts:
|
|
- *cipher = uadk_aes_256_xts;
|
|
- break;
|
|
- case NID_sm4_cbc:
|
|
- *cipher = uadk_sm4_cbc;
|
|
- break;
|
|
- case NID_sm4_ecb:
|
|
- *cipher = uadk_sm4_ecb;
|
|
- break;
|
|
- case NID_des_ede3_cbc:
|
|
- *cipher = uadk_des_ede3_cbc;
|
|
- break;
|
|
- case NID_des_ede3_ecb:
|
|
- *cipher = uadk_des_ede3_ecb;
|
|
- break;
|
|
- case NID_aes_128_ofb128:
|
|
- *cipher = uadk_aes_128_ofb128;
|
|
- break;
|
|
- case NID_aes_192_ofb128:
|
|
- *cipher = uadk_aes_192_ofb128;
|
|
- break;
|
|
- case NID_aes_256_ofb128:
|
|
- *cipher = uadk_aes_256_ofb128;
|
|
- break;
|
|
- case NID_aes_128_cfb128:
|
|
- *cipher = uadk_aes_128_cfb128;
|
|
- break;
|
|
- case NID_aes_192_cfb128:
|
|
- *cipher = uadk_aes_192_cfb128;
|
|
- break;
|
|
- case NID_aes_256_cfb128:
|
|
- *cipher = uadk_aes_256_cfb128;
|
|
- break;
|
|
- case NID_sm4_ofb128:
|
|
- *cipher = uadk_sm4_ofb128;
|
|
- break;
|
|
- case NID_sm4_cfb128:
|
|
- *cipher = uadk_sm4_cfb128;
|
|
- break;
|
|
- case NID_sm4_ctr:
|
|
- *cipher = uadk_sm4_ctr;
|
|
- break;
|
|
- default:
|
|
- ret = 0;
|
|
- *cipher = NULL;
|
|
- break;
|
|
- }
|
|
-
|
|
- return ret;
|
|
-}
|
|
-
|
|
static handle_t sched_single_init(handle_t h_sched_ctx, void *sched_param)
|
|
{
|
|
struct sched_params *param = (struct sched_params *)sched_param;
|
|
@@ -560,7 +391,8 @@ static int uadk_e_wd_cipher_env_init(struct uacce_dev *dev)
|
|
|
|
static int uadk_e_wd_cipher_init(struct uacce_dev *dev)
|
|
{
|
|
- int ret, i, j;
|
|
+ __u32 i, j;
|
|
+ int ret;
|
|
|
|
g_cipher_engine.numa_id = dev->numa_id;
|
|
|
|
@@ -978,210 +810,193 @@ do { \
|
|
return 0; \
|
|
} while (0)
|
|
|
|
-static int bind_v2_cipher(void)
|
|
-{
|
|
- UADK_CIPHER_DESCR(aes_128_cbc, 16, 16, 16, EVP_CIPH_CBC_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(aes_192_cbc, 16, 24, 16, EVP_CIPH_CBC_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(aes_256_cbc, 16, 32, 16, EVP_CIPH_CBC_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(aes_128_ecb, 16, 16, 0, EVP_CIPH_ECB_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(aes_192_ecb, 16, 24, 0, EVP_CIPH_ECB_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(aes_256_ecb, 16, 32, 0, EVP_CIPH_ECB_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(aes_128_xts, 1, 32, 16, EVP_CIPH_XTS_MODE | EVP_CIPH_CUSTOM_IV,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(aes_256_xts, 1, 64, 16, EVP_CIPH_XTS_MODE | EVP_CIPH_CUSTOM_IV,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(sm4_cbc, 16, 16, 16, EVP_CIPH_CBC_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(des_ede3_cbc, 8, 24, 8, EVP_CIPH_CBC_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(des_ede3_ecb, 8, 24, 0, EVP_CIPH_ECB_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(sm4_ecb, 16, 16, 0, EVP_CIPH_ECB_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
-
|
|
- return 0;
|
|
-}
|
|
-
|
|
-static int bind_v3_cipher(void)
|
|
-{
|
|
- UADK_CIPHER_DESCR(aes_128_ctr, 1, 16, 16, EVP_CIPH_CTR_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(aes_192_ctr, 1, 24, 16, EVP_CIPH_CTR_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(aes_256_ctr, 1, 32, 16, EVP_CIPH_CTR_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(aes_128_ofb128, 1, 16, 16, EVP_CIPH_OFB_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(aes_192_ofb128, 1, 24, 16, EVP_CIPH_OFB_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(aes_256_ofb128, 1, 32, 16, EVP_CIPH_OFB_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(aes_128_cfb128, 1, 16, 16, EVP_CIPH_CFB_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(aes_192_cfb128, 1, 24, 16, EVP_CIPH_CFB_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(aes_256_cfb128, 1, 32, 16, EVP_CIPH_CFB_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(sm4_ofb128, 1, 16, 16, EVP_CIPH_OFB_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(sm4_cfb128, 1, 16, 16, EVP_CIPH_OFB_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
- UADK_CIPHER_DESCR(sm4_ctr, 1, 16, 16, EVP_CIPH_CTR_MODE,
|
|
- sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
- uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
- EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
-
|
|
- return 0;
|
|
-}
|
|
-
|
|
-int uadk_e_bind_cipher(ENGINE *e)
|
|
+EVP_CIPHER *uadk_create_cipher_meth(int nid)
|
|
{
|
|
- int ret;
|
|
+ EVP_CIPHER *cipher;
|
|
|
|
- ret = uadk_get_accel_platform("cipher");
|
|
- if (!ret) {
|
|
- fprintf(stderr, "failed to get accel hardware version.\n");
|
|
- return 0;
|
|
+ switch (nid) {
|
|
+ case NID_aes_128_cbc:
|
|
+ UADK_CIPHER_DESCR(aes_128_cbc, 16, 16, 16, EVP_CIPH_CBC_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_128_cbc;
|
|
+ break;
|
|
+ case NID_aes_192_cbc:
|
|
+ UADK_CIPHER_DESCR(aes_192_cbc, 16, 24, 16, EVP_CIPH_CBC_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_192_cbc;
|
|
+ break;
|
|
+ case NID_aes_256_cbc:
|
|
+ UADK_CIPHER_DESCR(aes_256_cbc, 16, 32, 16, EVP_CIPH_CBC_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_256_cbc;
|
|
+ break;
|
|
+ case NID_aes_128_ecb:
|
|
+ UADK_CIPHER_DESCR(aes_128_ecb, 16, 16, 0, EVP_CIPH_ECB_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_128_ecb;
|
|
+ break;
|
|
+ case NID_aes_192_ecb:
|
|
+ UADK_CIPHER_DESCR(aes_192_ecb, 16, 24, 0, EVP_CIPH_ECB_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_192_ecb;
|
|
+ break;
|
|
+ case NID_aes_256_ecb:
|
|
+ UADK_CIPHER_DESCR(aes_256_ecb, 16, 32, 0, EVP_CIPH_ECB_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_256_ecb;
|
|
+ break;
|
|
+ case NID_aes_128_xts:
|
|
+ UADK_CIPHER_DESCR(aes_128_xts, 1, 32, 16, EVP_CIPH_XTS_MODE | EVP_CIPH_CUSTOM_IV,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_128_xts;
|
|
+ break;
|
|
+ case NID_aes_256_xts:
|
|
+ UADK_CIPHER_DESCR(aes_256_xts, 1, 64, 16, EVP_CIPH_XTS_MODE | EVP_CIPH_CUSTOM_IV,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_256_xts;
|
|
+ break;
|
|
+ case NID_sm4_cbc:
|
|
+ UADK_CIPHER_DESCR(sm4_cbc, 16, 16, 16, EVP_CIPH_CBC_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_sm4_cbc;
|
|
+ break;
|
|
+ case NID_des_ede3_cbc:
|
|
+ UADK_CIPHER_DESCR(des_ede3_cbc, 8, 24, 8, EVP_CIPH_CBC_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_des_ede3_cbc;
|
|
+ break;
|
|
+ case NID_des_ede3_ecb:
|
|
+ UADK_CIPHER_DESCR(des_ede3_ecb, 8, 24, 0, EVP_CIPH_ECB_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_des_ede3_ecb;
|
|
+ break;
|
|
+ case NID_aes_128_ctr:
|
|
+ UADK_CIPHER_DESCR(aes_128_ctr, 1, 16, 16, EVP_CIPH_CTR_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_128_ctr;
|
|
+ break;
|
|
+ case NID_aes_192_ctr:
|
|
+ UADK_CIPHER_DESCR(aes_192_ctr, 1, 24, 16, EVP_CIPH_CTR_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_192_ctr;
|
|
+ break;
|
|
+ case NID_aes_256_ctr:
|
|
+ UADK_CIPHER_DESCR(aes_256_ctr, 1, 32, 16, EVP_CIPH_CTR_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_256_ctr;
|
|
+ break;
|
|
+ case NID_aes_128_ofb128:
|
|
+ UADK_CIPHER_DESCR(aes_128_ofb128, 1, 16, 16, EVP_CIPH_OFB_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_128_ofb128;
|
|
+ break;
|
|
+ case NID_aes_192_ofb128:
|
|
+ UADK_CIPHER_DESCR(aes_192_ofb128, 1, 24, 16, EVP_CIPH_OFB_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_192_ofb128;
|
|
+ break;
|
|
+ case NID_aes_256_ofb128:
|
|
+ UADK_CIPHER_DESCR(aes_256_ofb128, 1, 32, 16, EVP_CIPH_OFB_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_256_ofb128;
|
|
+ break;
|
|
+ case NID_aes_128_cfb128:
|
|
+ UADK_CIPHER_DESCR(aes_128_cfb128, 1, 16, 16, EVP_CIPH_CFB_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_128_cfb128;
|
|
+ break;
|
|
+ case NID_aes_192_cfb128:
|
|
+ UADK_CIPHER_DESCR(aes_192_cfb128, 1, 24, 16, EVP_CIPH_CFB_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_192_cfb128;
|
|
+ break;
|
|
+ case NID_aes_256_cfb128:
|
|
+ UADK_CIPHER_DESCR(aes_256_cfb128, 1, 32, 16, EVP_CIPH_CFB_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_aes_256_cfb128;
|
|
+ break;
|
|
+ case NID_sm4_ofb128:
|
|
+ UADK_CIPHER_DESCR(sm4_ofb128, 1, 16, 16, EVP_CIPH_OFB_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_sm4_ofb128;
|
|
+ break;
|
|
+ case NID_sm4_cfb128:
|
|
+ UADK_CIPHER_DESCR(sm4_cfb128, 1, 16, 16, EVP_CIPH_OFB_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_sm4_cfb128;
|
|
+ break;
|
|
+ case NID_sm4_ctr:
|
|
+ UADK_CIPHER_DESCR(sm4_ctr, 1, 16, 16, EVP_CIPH_CTR_MODE,
|
|
+ sizeof(struct cipher_priv_ctx), uadk_e_cipher_init,
|
|
+ uadk_e_do_cipher, uadk_e_cipher_cleanup,
|
|
+ EVP_CIPHER_set_asn1_iv, EVP_CIPHER_get_asn1_iv);
|
|
+ cipher = uadk_sm4_ctr;
|
|
+ break;
|
|
+ default:
|
|
+ cipher = NULL;
|
|
+ break;
|
|
}
|
|
|
|
- bind_v2_cipher();
|
|
- if (platform > HW_V2)
|
|
- bind_v3_cipher();
|
|
-
|
|
- return ENGINE_set_ciphers(e, uadk_e_engine_ciphers);
|
|
-}
|
|
-
|
|
-static void destroy_v2_cipher(void)
|
|
-{
|
|
- EVP_CIPHER_meth_free(uadk_aes_128_cbc);
|
|
- uadk_aes_128_cbc = 0;
|
|
- EVP_CIPHER_meth_free(uadk_aes_192_cbc);
|
|
- uadk_aes_192_cbc = 0;
|
|
- EVP_CIPHER_meth_free(uadk_aes_256_cbc);
|
|
- uadk_aes_256_cbc = 0;
|
|
- EVP_CIPHER_meth_free(uadk_aes_128_ecb);
|
|
- uadk_aes_128_ecb = 0;
|
|
- EVP_CIPHER_meth_free(uadk_aes_192_ecb);
|
|
- uadk_aes_192_ecb = 0;
|
|
- EVP_CIPHER_meth_free(uadk_aes_256_ecb);
|
|
- uadk_aes_256_ecb = 0;
|
|
- EVP_CIPHER_meth_free(uadk_aes_128_xts);
|
|
- uadk_aes_128_xts = 0;
|
|
- EVP_CIPHER_meth_free(uadk_aes_256_xts);
|
|
- uadk_aes_256_xts = 0;
|
|
- EVP_CIPHER_meth_free(uadk_sm4_cbc);
|
|
- uadk_sm4_cbc = 0;
|
|
- EVP_CIPHER_meth_free(uadk_des_ede3_cbc);
|
|
- uadk_des_ede3_cbc = 0;
|
|
- EVP_CIPHER_meth_free(uadk_des_ede3_ecb);
|
|
- uadk_des_ede3_ecb = 0;
|
|
- EVP_CIPHER_meth_free(uadk_sm4_ecb);
|
|
- uadk_sm4_ecb = 0;
|
|
+ return cipher;
|
|
}
|
|
|
|
-static void destroy_v3_cipher(void)
|
|
+static void destroy_cipher(struct engine_cipher_info *info, int num)
|
|
{
|
|
- EVP_CIPHER_meth_free(uadk_aes_128_ctr);
|
|
- uadk_aes_128_ctr = 0;
|
|
- EVP_CIPHER_meth_free(uadk_aes_192_ctr);
|
|
- uadk_aes_192_ctr = 0;
|
|
- EVP_CIPHER_meth_free(uadk_aes_256_ctr);
|
|
- uadk_aes_256_ctr = 0;
|
|
- EVP_CIPHER_meth_free(uadk_aes_128_ofb128);
|
|
- uadk_aes_128_ofb128 = 0;
|
|
- EVP_CIPHER_meth_free(uadk_aes_192_ofb128);
|
|
- uadk_aes_192_ofb128 = 0;
|
|
- EVP_CIPHER_meth_free(uadk_aes_256_ofb128);
|
|
- uadk_aes_256_ofb128 = 0;
|
|
- EVP_CIPHER_meth_free(uadk_aes_128_cfb128);
|
|
- uadk_aes_128_cfb128 = 0;
|
|
- EVP_CIPHER_meth_free(uadk_aes_192_cfb128);
|
|
- uadk_aes_192_cfb128 = 0;
|
|
- EVP_CIPHER_meth_free(uadk_aes_256_cfb128);
|
|
- uadk_aes_256_cfb128 = 0;
|
|
- EVP_CIPHER_meth_free(uadk_sm4_cfb128);
|
|
- uadk_sm4_cfb128 = 0;
|
|
- EVP_CIPHER_meth_free(uadk_sm4_ofb128);
|
|
- uadk_sm4_ofb128 = 0;
|
|
- EVP_CIPHER_meth_free(uadk_sm4_ctr);
|
|
- uadk_sm4_ctr = 0;
|
|
-}
|
|
-
|
|
-void uadk_e_destroy_cipher(void)
|
|
-{
|
|
- __u32 i;
|
|
- int ret;
|
|
-
|
|
- if (g_cipher_engine.pid == getpid()) {
|
|
- ret = uadk_e_is_env_enabled("cipher");
|
|
- if (ret == ENV_ENABLED) {
|
|
- wd_cipher_env_uninit();
|
|
- } else {
|
|
- wd_cipher_uninit();
|
|
- for (i = 0; i < g_cipher_engine.ctx_cfg.ctx_num; i++)
|
|
- wd_release_ctx(g_cipher_engine.ctx_cfg.ctxs[i].ctx);
|
|
- free(g_cipher_engine.ctx_cfg.ctxs);
|
|
+ for (int i = 0; i != num; ++i) {
|
|
+ if (info[i].cipher != NULL) {
|
|
+ EVP_CIPHER_meth_free(info[i].cipher);
|
|
+ info[i].cipher = NULL;
|
|
}
|
|
- g_cipher_engine.pid = 0;
|
|
}
|
|
+}
|
|
|
|
- pthread_spin_destroy(&g_cipher_engine.lock);
|
|
-
|
|
- destroy_v2_cipher();
|
|
- if (platform > HW_V2)
|
|
- destroy_v3_cipher();
|
|
+void uadk_e_destroy_cipher(struct engine_cipher_info *info, int num)
|
|
+{
|
|
+ destroy_cipher(info, num);
|
|
}
|
|
|
|
void uadk_e_cipher_lock_init(void)
|
|
diff --git a/src/uadk_cipher_adapter.c b/src/uadk_cipher_adapter.c
|
|
new file mode 100644
|
|
index 0000000..c915df8
|
|
--- /dev/null
|
|
+++ b/src/uadk_cipher_adapter.c
|
|
@@ -0,0 +1,204 @@
|
|
+/*
|
|
+ * Copyright 2023 Huawei Technologies Co.,Ltd. All rights reserved.
|
|
+ *
|
|
+ * Licensed under the Apache License, Version 2.0 (the "License");
|
|
+ * you may not use this file except in compliance with the License.
|
|
+ * You may obtain a copy of the License at
|
|
+ *
|
|
+ * http://www.apache.org/licenses/LICENSE-2.0
|
|
+ *
|
|
+ * Unless required by applicable law or agreed to in writing, software
|
|
+ * distributed under the License is distributed on an "AS IS" BASIS,
|
|
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
+ * See the License for the specific language governing permissions and
|
|
+ * limitations under the License.
|
|
+ *
|
|
+ */
|
|
+#include "uadk_cipher_adapter.h"
|
|
+
|
|
+#define HW_SEC_V2 0
|
|
+#define HW_SEC_V3 1
|
|
+#define OTHERS_HW 2
|
|
+
|
|
+static int cipher_hw_v2_nids[] = {
|
|
+ NID_aes_128_cbc,
|
|
+ NID_aes_192_cbc,
|
|
+ NID_aes_256_cbc,
|
|
+ NID_aes_128_ecb,
|
|
+ NID_aes_192_ecb,
|
|
+ NID_aes_256_ecb,
|
|
+ NID_aes_128_xts,
|
|
+ NID_aes_256_xts,
|
|
+ NID_sm4_cbc,
|
|
+ NID_des_ede3_cbc,
|
|
+ NID_des_ede3_ecb,
|
|
+ NID_aes_128_gcm,
|
|
+ NID_aes_192_gcm,
|
|
+ NID_aes_256_gcm
|
|
+};
|
|
+
|
|
+static int cipher_hw_v3_nids[] = {
|
|
+ NID_aes_128_cbc,
|
|
+ NID_aes_192_cbc,
|
|
+ NID_aes_256_cbc,
|
|
+ NID_aes_128_ctr,
|
|
+ NID_aes_192_ctr,
|
|
+ NID_aes_256_ctr,
|
|
+ NID_aes_128_ecb,
|
|
+ NID_aes_192_ecb,
|
|
+ NID_aes_256_ecb,
|
|
+ NID_aes_128_xts,
|
|
+ NID_aes_256_xts,
|
|
+ NID_sm4_cbc,
|
|
+ NID_sm4_ecb,
|
|
+ NID_des_ede3_cbc,
|
|
+ NID_des_ede3_ecb,
|
|
+ NID_aes_128_cfb128,
|
|
+ NID_aes_192_cfb128,
|
|
+ NID_aes_256_cfb128,
|
|
+ NID_aes_128_ofb128,
|
|
+ NID_aes_192_ofb128,
|
|
+ NID_aes_256_ofb128,
|
|
+ NID_sm4_cfb128,
|
|
+ NID_sm4_ofb128,
|
|
+ NID_sm4_ctr,
|
|
+ NID_aes_128_gcm,
|
|
+ NID_aes_192_gcm,
|
|
+ NID_aes_256_gcm
|
|
+};
|
|
+
|
|
+static struct engine_cipher_info c_info[] = {
|
|
+ {NID_aes_128_cbc, NULL},
|
|
+ {NID_aes_192_cbc, NULL},
|
|
+ {NID_aes_256_cbc, NULL},
|
|
+ {NID_aes_128_ctr, NULL},
|
|
+ {NID_aes_192_ctr, NULL},
|
|
+ {NID_aes_256_ctr, NULL},
|
|
+ {NID_aes_128_ecb, NULL},
|
|
+ {NID_aes_192_ecb, NULL},
|
|
+ {NID_aes_256_ecb, NULL},
|
|
+ {NID_aes_128_xts, NULL},
|
|
+ {NID_aes_256_xts, NULL},
|
|
+ {NID_sm4_cbc, NULL},
|
|
+ {NID_sm4_ecb, NULL},
|
|
+ {NID_des_ede3_cbc, NULL},
|
|
+ {NID_des_ede3_ecb, NULL},
|
|
+ {NID_aes_128_cfb128, NULL},
|
|
+ {NID_aes_192_cfb128, NULL},
|
|
+ {NID_aes_256_cfb128, NULL},
|
|
+ {NID_aes_128_ofb128, NULL},
|
|
+ {NID_aes_192_ofb128, NULL},
|
|
+ {NID_aes_256_ofb128, NULL},
|
|
+ {NID_sm4_cfb128, NULL},
|
|
+ {NID_sm4_ofb128, NULL},
|
|
+ {NID_sm4_ctr, NULL},
|
|
+ {NID_aes_128_gcm, NULL},
|
|
+ {NID_aes_192_gcm, NULL},
|
|
+ {NID_aes_256_gcm, NULL}
|
|
+};
|
|
+
|
|
+static const unsigned int num_cc = ARRAY_SIZE(c_info);
|
|
+
|
|
+static void uadk_e_create_ciphers(int index)
|
|
+{
|
|
+ switch (c_info[index].nid) {
|
|
+ case NID_aes_128_gcm:
|
|
+ case NID_aes_192_gcm:
|
|
+ case NID_aes_256_gcm:
|
|
+ c_info[index].cipher = uadk_create_gcm_cipher_meth(c_info[index].nid);
|
|
+ break;
|
|
+ case NID_aes_128_cbc:
|
|
+ case NID_aes_192_cbc:
|
|
+ case NID_aes_256_cbc:
|
|
+ case NID_aes_128_ctr:
|
|
+ case NID_aes_192_ctr:
|
|
+ case NID_aes_256_ctr:
|
|
+ case NID_aes_128_ecb:
|
|
+ case NID_aes_192_ecb:
|
|
+ case NID_aes_256_ecb:
|
|
+ case NID_aes_128_xts:
|
|
+ case NID_aes_256_xts:
|
|
+ case NID_sm4_cbc:
|
|
+ case NID_sm4_ecb:
|
|
+ case NID_des_ede3_cbc:
|
|
+ case NID_des_ede3_ecb:
|
|
+ case NID_aes_128_cfb128:
|
|
+ case NID_aes_192_cfb128:
|
|
+ case NID_aes_256_cfb128:
|
|
+ case NID_aes_128_ofb128:
|
|
+ case NID_aes_192_ofb128:
|
|
+ case NID_aes_256_ofb128:
|
|
+ case NID_sm4_cfb128:
|
|
+ case NID_sm4_ofb128:
|
|
+ case NID_sm4_ctr:
|
|
+ c_info[index].cipher = uadk_create_cipher_meth(c_info[index].nid);
|
|
+ break;
|
|
+ default:
|
|
+ break;
|
|
+ }
|
|
+}
|
|
+
|
|
+int uadk_e_ciphers(ENGINE *e, const EVP_CIPHER **cipher, const int **nids, int nid)
|
|
+{
|
|
+ int platform = OTHERS_HW;
|
|
+ struct uacce_dev *dev;
|
|
+ __u32 i;
|
|
+
|
|
+ if (!e)
|
|
+ return 0;
|
|
+
|
|
+ if ((nids == NULL) && ((cipher == NULL) || (nid < 0))) {
|
|
+ if (cipher != NULL)
|
|
+ *cipher = NULL;
|
|
+ return 0;
|
|
+ }
|
|
+
|
|
+ dev = wd_get_accel_dev("cipher");
|
|
+ if (!dev) {
|
|
+ fprintf(stderr, "no device available, switch to software!\n");
|
|
+ return 0;
|
|
+ }
|
|
+
|
|
+ if (!strcmp(dev->api, "hisi_qm_v2"))
|
|
+ platform = HW_SEC_V2;
|
|
+ else if (!strcmp(dev->api, "hisi_qm_v3"))
|
|
+ platform = HW_SEC_V3;
|
|
+
|
|
+ free(dev);
|
|
+
|
|
+ if (cipher == NULL) {
|
|
+ if (platform == HW_SEC_V2) {
|
|
+ *nids = cipher_hw_v2_nids;
|
|
+ return ARRAY_SIZE(cipher_hw_v2_nids);
|
|
+ } else if (platform == HW_SEC_V3) {
|
|
+ *nids = cipher_hw_v3_nids;
|
|
+ return ARRAY_SIZE(cipher_hw_v3_nids);
|
|
+ }
|
|
+
|
|
+ return 0;
|
|
+ }
|
|
+
|
|
+ for (i = 0; i < num_cc; i++) {
|
|
+ if (nid == c_info[i].nid) {
|
|
+ if (c_info[i].cipher == NULL)
|
|
+ uadk_e_create_ciphers(i);
|
|
+
|
|
+ *cipher = c_info[i].cipher;
|
|
+ return 1;
|
|
+ }
|
|
+ }
|
|
+
|
|
+ *cipher = NULL;
|
|
+ return 0;
|
|
+}
|
|
+
|
|
+int uadk_e_bind_ciphers(ENGINE *e)
|
|
+{
|
|
+ return ENGINE_set_ciphers(e, uadk_e_ciphers);
|
|
+}
|
|
+
|
|
+void uadk_e_destroy_ciphers(void)
|
|
+{
|
|
+ uadk_e_destroy_cipher(c_info, num_cc);
|
|
+ uadk_e_destroy_aead(c_info, num_cc);
|
|
+}
|
|
diff --git a/src/uadk_cipher_adapter.h b/src/uadk_cipher_adapter.h
|
|
new file mode 100644
|
|
index 0000000..f7b6fb4
|
|
--- /dev/null
|
|
+++ b/src/uadk_cipher_adapter.h
|
|
@@ -0,0 +1,36 @@
|
|
+/*
|
|
+ * Copyright 2023 Huawei Technologies Co.,Ltd. All rights reserved.
|
|
+ *
|
|
+ * Licensed under the Apache License, Version 2.0 (the "License");
|
|
+ * you may not use this file except in compliance with the License.
|
|
+ * You may obtain a copy of the License at
|
|
+ *
|
|
+ * http://www.apache.org/licenses/LICENSE-2.0
|
|
+ *
|
|
+ * Unless required by applicable law or agreed to in writing, software
|
|
+ * distributed under the License is distributed on an "AS IS" BASIS,
|
|
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
+ * See the License for the specific language governing permissions and
|
|
+ * limitations under the License.
|
|
+ *
|
|
+ */
|
|
+#ifndef UADK_ADAPT_H
|
|
+#define UADK_ADAPT_H
|
|
+#include <openssl/engine.h>
|
|
+#include <uadk/wd.h>
|
|
+
|
|
+struct engine_cipher_info {
|
|
+ int nid;
|
|
+ EVP_CIPHER *cipher;
|
|
+};
|
|
+
|
|
+#define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0]))
|
|
+
|
|
+EVP_CIPHER *uadk_create_gcm_cipher_meth(int nid);
|
|
+EVP_CIPHER *uadk_create_cipher_meth(int nid);
|
|
+void uadk_e_destroy_aead(struct engine_cipher_info *info, int num);
|
|
+void uadk_e_destroy_cipher(struct engine_cipher_info *info, int num);
|
|
+
|
|
+int uadk_e_bind_ciphers(ENGINE *e);
|
|
+void uadk_e_destroy_ciphers(void);
|
|
+#endif
|
|
diff --git a/src/uadk_engine_init.c b/src/uadk_engine_init.c
|
|
index cf54360..33707bf 100644
|
|
--- a/src/uadk_engine_init.c
|
|
+++ b/src/uadk_engine_init.c
|
|
@@ -24,15 +24,17 @@
|
|
#include <uadk/wd.h>
|
|
#include "uadk.h"
|
|
#include "uadk_async.h"
|
|
+#include "uadk_cipher_adapter.h"
|
|
#ifdef KAE
|
|
#include "v1/uadk_v1.h"
|
|
#endif
|
|
|
|
#define UADK_CMD_ENABLE_CIPHER_ENV ENGINE_CMD_BASE
|
|
-#define UADK_CMD_ENABLE_DIGEST_ENV (ENGINE_CMD_BASE + 1)
|
|
-#define UADK_CMD_ENABLE_RSA_ENV (ENGINE_CMD_BASE + 2)
|
|
-#define UADK_CMD_ENABLE_DH_ENV (ENGINE_CMD_BASE + 3)
|
|
-#define UADK_CMD_ENABLE_ECC_ENV (ENGINE_CMD_BASE + 4)
|
|
+#define UADK_CMD_ENABLE_AEAD_ENV (ENGINE_CMD_BASE + 1)
|
|
+#define UADK_CMD_ENABLE_DIGEST_ENV (ENGINE_CMD_BASE + 2)
|
|
+#define UADK_CMD_ENABLE_RSA_ENV (ENGINE_CMD_BASE + 3)
|
|
+#define UADK_CMD_ENABLE_DH_ENV (ENGINE_CMD_BASE + 4)
|
|
+#define UADK_CMD_ENABLE_ECC_ENV (ENGINE_CMD_BASE + 5)
|
|
|
|
/* Constants used when creating the ENGINE */
|
|
static const char *engine_uadk_id = "uadk_engine";
|
|
@@ -60,6 +62,12 @@ static const ENGINE_CMD_DEFN g_uadk_cmd_defns[] = {
|
|
"Enable or Disable cipher engine environment variable.",
|
|
ENGINE_CMD_FLAG_NUMERIC
|
|
},
|
|
+ {
|
|
+ UADK_CMD_ENABLE_AEAD_ENV,
|
|
+ "UADK_CMD_ENABLE_AEAD_ENV",
|
|
+ "Enable or Disable aead engine environment variable.",
|
|
+ ENGINE_CMD_FLAG_NUMERIC
|
|
+ },
|
|
{
|
|
UADK_CMD_ENABLE_DIGEST_ENV,
|
|
"UADK_CMD_ENABLE_DIGEST_ENV",
|
|
@@ -104,6 +112,7 @@ struct uadk_alg_env_enabled {
|
|
|
|
static struct uadk_alg_env_enabled uadk_env_enabled[] = {
|
|
{ "cipher", 0 },
|
|
+ { "aead", 0 },
|
|
{ "digest", 0 },
|
|
{ "rsa", 0 },
|
|
{ "dh", 0 },
|
|
@@ -176,6 +185,9 @@ static int uadk_engine_ctrl(ENGINE *e, int cmd, long i,
|
|
case UADK_CMD_ENABLE_CIPHER_ENV:
|
|
uadk_e_set_env_enabled("cipher", i);
|
|
break;
|
|
+ case UADK_CMD_ENABLE_AEAD_ENV:
|
|
+ uadk_e_set_env_enabled("aead", i);
|
|
+ break;
|
|
case UADK_CMD_ENABLE_DIGEST_ENV:
|
|
uadk_e_set_env_enabled("digest", i);
|
|
break;
|
|
@@ -210,7 +222,7 @@ static int uadk_destroy(ENGINE *e)
|
|
#endif
|
|
|
|
if (uadk_cipher)
|
|
- uadk_e_destroy_cipher();
|
|
+ uadk_e_destroy_ciphers();
|
|
if (uadk_digest)
|
|
uadk_e_destroy_digest();
|
|
if (uadk_rsa)
|
|
@@ -328,7 +340,7 @@ static void bind_fn_uadk_alg(ENGINE *e)
|
|
|
|
dev = wd_get_accel_dev("cipher");
|
|
if (dev) {
|
|
- if (!uadk_e_bind_cipher(e))
|
|
+ if (!uadk_e_bind_ciphers(e))
|
|
fprintf(stderr, "uadk bind cipher failed\n");
|
|
else
|
|
uadk_cipher = 1;
|
|
--
|
|
2.25.1
|
|
|