
Commit 2f47d580 authored by Harsh Jain, committed by Herbert Xu

crypto: chelsio - Move DMA un/mapping to chcr from lld cxgb4 driver

Allow chcr to do the DMA mapping/unmapping itself instead of the
lld cxgb4 driver. This also moves the "Copy AAD to dst buffer"
requirement from the driver to firmware.

Signed-off-by: Harsh Jain <harsh@chelsio.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 2956f36c
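
For context, the header diff below declares new chcr_cipher_dma_map()/chcr_cipher_dma_unmap() (and aead/hash equivalents) so that the crypto driver, not the cxgb4 lld, owns the mapping of req->src and req->dst. The following is a minimal, illustrative sketch of the general shape such a map/unmap pair usually takes, built only on the standard dma_map_sg()/dma_unmap_sg() API; the example_* names are hypothetical and this is not the body added by the commit.

#include <linux/crypto.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int example_cipher_dma_map(struct device *dev,
				  struct ablkcipher_request *req)
{
	int err;

	if (req->src == req->dst) {
		/* In-place operation: one bidirectional mapping is enough. */
		err = dma_map_sg(dev, req->src, sg_nents(req->src),
				 DMA_BIDIRECTIONAL);
		return err ? 0 : -ENOMEM;
	}

	err = dma_map_sg(dev, req->src, sg_nents(req->src), DMA_TO_DEVICE);
	if (!err)
		return -ENOMEM;
	err = dma_map_sg(dev, req->dst, sg_nents(req->dst), DMA_FROM_DEVICE);
	if (!err) {
		/* dst mapping failed: undo the src mapping before bailing. */
		dma_unmap_sg(dev, req->src, sg_nents(req->src),
			     DMA_TO_DEVICE);
		return -ENOMEM;
	}
	return 0;
}

static void example_cipher_dma_unmap(struct device *dev,
				     struct ablkcipher_request *req)
{
	if (req->src == req->dst) {
		dma_unmap_sg(dev, req->src, sg_nents(req->src),
			     DMA_BIDIRECTIONAL);
		return;
	}
	dma_unmap_sg(dev, req->src, sg_nents(req->src), DMA_TO_DEVICE);
	dma_unmap_sg(dev, req->dst, sg_nents(req->dst), DMA_FROM_DEVICE);
}

The error handling follows the common kernel pattern: a failed dst mapping unmaps the already-mapped src before returning -ENOMEM, so the unmap helper can assume a fully mapped request.
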
+1015 −630

Preview size limit exceeded, changes collapsed.

+9 −35
@@ -214,27 +214,22 @@
					   calc_tx_flits_ofld(skb) * 8), 16)))

#define FILL_CMD_MORE(immdatalen) htonl(ULPTX_CMD_V(ULP_TX_SC_IMM) |\
					ULP_TX_SC_MORE_V((immdatalen) ? 0 : 1))

					ULP_TX_SC_MORE_V((immdatalen)))
#define MAX_NK 8
#define CRYPTO_MAX_IMM_TX_PKT_LEN 256
#define MAX_WR_SIZE			512
#define ROUND_16(bytes)		((bytes) & 0xFFFFFFF0)
#define MAX_DSGL_ENT			32
#define MAX_DIGEST_SKB_SGE	(MAX_SKB_FRAGS - 1)
#define MIN_CIPHER_SG			1 /* IV */
#define MIN_AUTH_SG			2 /*IV + AAD*/
#define MIN_GCM_SG			2 /* IV + AAD*/
#define MIN_AUTH_SG			1 /* IV */
#define MIN_GCM_SG			1 /* IV */
#define MIN_DIGEST_SG			1 /*Partial Buffer*/
#define MIN_CCM_SG			3 /*IV+AAD+B0*/
#define MIN_CCM_SG			2 /*IV+B0*/
#define SPACE_LEFT(len) \
	((MAX_WR_SIZE - WR_MIN_LEN - (len)))
	((SGE_MAX_WR_LEN - WR_MIN_LEN - (len)))

unsigned int sgl_ent_len[] = {0, 0, 16, 24, 40,
				48, 64, 72, 88,
				96, 112, 120, 136,
				144, 160, 168, 184,
				192};
unsigned int sgl_ent_len[] = {0, 0, 16, 24, 40, 48, 64, 72, 88,
				96, 112, 120, 136, 144, 160, 168, 184,
				192, 208, 216, 232, 240, 256, 264, 280,
				288, 304, 312, 328, 336, 352, 360, 376};
unsigned int dsgl_ent_len[] = {0, 32, 32, 48, 48, 64, 64, 80, 80,
				112, 112, 128, 128, 144, 144, 160, 160,
				192, 192, 208, 208, 224, 224, 240, 240,
@@ -258,7 +253,6 @@ struct hash_wr_param {

struct cipher_wr_param {
	struct ablkcipher_request *req;
	struct scatterlist *srcsg;
	char *iv;
	int bytes;
	unsigned short qid;
@@ -298,31 +292,11 @@ enum {
	ICV_16 = 16
};

struct hash_op_params {
	unsigned char mk_size;
	unsigned char pad_align;
	unsigned char auth_mode;
	char hash_name[MAX_HASH_NAME];
	unsigned short block_size;
	unsigned short word_size;
	unsigned short ipad_size;
};

struct phys_sge_pairs {
	__be16 len[8];
	__be64 addr[8];
};

struct phys_sge_parm {
	unsigned int nents;
	unsigned int obsize;
	unsigned short qid;
};

struct crypto_result {
	struct completion completion;
	int err;
};

static const u32 sha1_init[SHA1_DIGEST_SIZE / 4] = {
		SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4,
+85 −29
@@ -149,9 +149,23 @@

#define CHCR_HASH_MAX_BLOCK_SIZE_64  64
#define CHCR_HASH_MAX_BLOCK_SIZE_128 128
#define CHCR_SG_SIZE 2048
#define CHCR_SRC_SG_SIZE (0x10000 - sizeof(int))
#define CHCR_DST_SG_SIZE 2048

/* Aligned to 128 bit boundary */
static inline struct chcr_context *a_ctx(struct crypto_aead *tfm)
{
	return crypto_aead_ctx(tfm);
}

static inline struct chcr_context *c_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_ablkcipher_ctx(tfm);
}

static inline struct chcr_context *h_ctx(struct crypto_ahash *tfm)
{
	return crypto_tfm_ctx(crypto_ahash_tfm(tfm));
}

struct ablk_ctx {
	struct crypto_skcipher *sw_cipher;
@@ -165,15 +179,39 @@ struct ablk_ctx {
};
struct chcr_aead_reqctx {
	struct	sk_buff	*skb;
	struct scatterlist *dst;
	struct scatterlist srcffwd[2];
	struct scatterlist dstffwd[2];
	dma_addr_t iv_dma;
	dma_addr_t b0_dma;
	unsigned int b0_len;
	unsigned int op;
	short int aad_nents;
	short int src_nents;
	short int dst_nents;
	u16 imm;
	u16 verify;
	u8 iv[CHCR_MAX_CRYPTO_IV_LEN];
	unsigned char scratch_pad[MAX_SCRATCH_PAD_SIZE];
};

struct ulptx_walk {
	struct ulptx_sgl *sgl;
	unsigned int nents;
	unsigned int pair_idx;
	unsigned int last_sg_len;
	struct scatterlist *last_sg;
	struct ulptx_sge_pair *pair;

};

struct dsgl_walk {
	unsigned int nents;
	unsigned int last_sg_len;
	struct scatterlist *last_sg;
	struct cpl_rx_phys_dsgl *dsgl;
	struct phys_sge_pairs *to;
};



struct chcr_gcm_ctx {
	u8 ghash_h[AEAD_H_SIZE];
};
@@ -194,7 +232,6 @@ struct __aead_ctx {
struct chcr_aead_ctx {
	__be32 key_ctx_hdr;
	unsigned int enckey_len;
	struct crypto_skcipher *null;
	struct crypto_aead *sw_cipher;
	u8 salt[MAX_SALT];
	u8 key[CHCR_AES_MAX_KEY_LEN];
@@ -230,8 +267,11 @@ struct chcr_ahash_req_ctx {
	u8 bfr2[CHCR_HASH_MAX_BLOCK_SIZE_128];
	u8 *reqbfr;
	u8 *skbfr;
	dma_addr_t dma_addr;
	u32 dma_len;
	u8 reqlen;
	/* DMA the partial hash in it */
	u8 imm;
	u8 is_sg_map;
	u8 partial_hash[CHCR_HASH_MAX_DIGEST_SIZE];
	u64 data_len;  /* Data len till time */
	/* SKB which is being sent to the hardware for processing */
@@ -240,14 +280,15 @@ struct chcr_ahash_req_ctx {

struct chcr_blkcipher_req_ctx {
	struct sk_buff *skb;
	struct scatterlist srcffwd[2];
	struct scatterlist dstffwd[2];
	struct scatterlist *dstsg;
	struct scatterlist *dst;
	unsigned int processed;
	unsigned int last_req_len;
	struct scatterlist *srcsg;
	unsigned int src_ofst;
	unsigned int dst_ofst;
	unsigned int op;
	short int dst_nents;
	dma_addr_t iv_dma;
	u16 imm;
	u8 iv[CHCR_MAX_CRYPTO_IV_LEN];
};

@@ -261,24 +302,6 @@ struct chcr_alg_template {
	} alg;
};

struct chcr_req_ctx {
	union {
		struct ahash_request *ahash_req;
		struct aead_request *aead_req;
		struct ablkcipher_request *ablk_req;
	} req;
	union {
		struct chcr_ahash_req_ctx *ahash_ctx;
		struct chcr_aead_reqctx *reqctx;
		struct chcr_blkcipher_req_ctx *ablk_ctx;
	} ctx;
};

struct sge_opaque_hdr {
	void *dev;
	dma_addr_t addr[MAX_SKB_FRAGS + 1];
};

typedef struct sk_buff *(*create_wr_t)(struct aead_request *req,
				       unsigned short qid,
				       int size,
@@ -291,4 +314,37 @@ static int chcr_aead_op(struct aead_request *req_base,
static inline int get_aead_subtype(struct crypto_aead *aead);
static int chcr_handle_cipher_resp(struct ablkcipher_request *req,
				   unsigned char *input, int err);
static void chcr_verify_tag(struct aead_request *req, u8 *input, int *err);
static int chcr_aead_dma_map(struct device *dev, struct aead_request *req,
			     unsigned short op_type);
static void chcr_aead_dma_unmap(struct device *dev, struct aead_request
				*req, unsigned short op_type);
static inline void chcr_add_aead_dst_ent(struct aead_request *req,
				    struct cpl_rx_phys_dsgl *phys_cpl,
				    unsigned int assoclen,
				    unsigned short op_type,
				    unsigned short qid);
static inline void chcr_add_aead_src_ent(struct aead_request *req,
				    struct ulptx_sgl *ulptx,
				    unsigned int assoclen,
				    unsigned short op_type);
static inline void chcr_add_cipher_src_ent(struct ablkcipher_request *req,
					   struct ulptx_sgl *ulptx,
					   struct  cipher_wr_param *wrparam);
static int chcr_cipher_dma_map(struct device *dev,
			       struct ablkcipher_request *req);
static void chcr_cipher_dma_unmap(struct device *dev,
				  struct ablkcipher_request *req);
static inline void chcr_add_cipher_dst_ent(struct ablkcipher_request *req,
					   struct cpl_rx_phys_dsgl *phys_cpl,
					   struct  cipher_wr_param *wrparam,
					   unsigned short qid);
int sg_nents_len_skip(struct scatterlist *sg, u64 len, u64 skip);
static inline void chcr_add_hash_src_ent(struct ahash_request *req,
					 struct ulptx_sgl *ulptx,
					 struct hash_wr_param *param);
static inline int chcr_hash_dma_map(struct device *dev,
				    struct ahash_request *req);
static inline void chcr_hash_dma_unmap(struct device *dev,
				       struct ahash_request *req);
#endif /* __CHCR_CRYPTO_H__ */
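
The ulptx_walk and dsgl_walk state added above is what chcr uses to translate the now DMA-mapped scatterlists into the hardware's ULPTX SGL and PHYS_DSGL layouts. As a rough, hypothetical illustration of the destination side (the helper name and packing loop are illustrative, not the commit's code; only struct phys_sge_pairs from chcr_algo.h and the standard scatterlist accessors are taken from the sources shown here), a walk packs big-endian (length, address) pairs eight at a time:

#include <linux/types.h>
#include <linux/scatterlist.h>
#include <asm/byteorder.h>

/* Shape of struct phys_sge_pairs as declared in chcr_algo.h (first diff
 * above): eight big-endian (len, addr) pairs per block. */
struct phys_sge_pairs {
	__be16 len[8];
	__be64 addr[8];
};

static void example_dsgl_pack(struct phys_sge_pairs *to,
			      struct scatterlist *sgl, int nents)
{
	struct scatterlist *sg;
	int i;

	/* Assumes sgl has already been DMA-mapped (see the earlier sketch),
	 * so sg_dma_address()/sg_dma_len() are valid here. */
	for_each_sg(sgl, sg, nents, i) {
		to[i / 8].len[i % 8] = cpu_to_be16(sg_dma_len(sg));
		to[i / 8].addr[i % 8] = cpu_to_be64(sg_dma_address(sg));
	}
}

Since each length field is a __be16, a real walk also has to split segments larger than what fits there (compare the CHCR_SRC_SG_SIZE / CHCR_DST_SG_SIZE limits defined above).
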
+7 −1
@@ -1537,6 +1537,12 @@ int t4_mgmt_tx(struct adapter *adap, struct sk_buff *skb)
 */
static inline int is_ofld_imm(const struct sk_buff *skb)
{
	struct work_request_hdr *req = (struct work_request_hdr *)skb->data;
	unsigned long opcode = FW_WR_OP_G(ntohl(req->wr_hi));

	if (opcode == FW_CRYPTO_LOOKASIDE_WR)
		return skb->len <= SGE_MAX_WR_LEN;
	else
		return skb->len <= MAX_IMM_TX_PKT_LEN;
}