Commit a4ae58cd authored by Jakub Kicinski's avatar Jakub Kicinski Committed by David S. Miller
Browse files

tls: rx: only copy IV from the packet for TLS 1.2



TLS 1.3 and ChaChaPoly don't carry IV in the packet.
The code before this change would copy out iv_size
worth of whatever followed the TLS header in the packet
and then, for TLS 1.3 or ChaChaPoly, overwrite that with
the sequence number. This is a waste of cycles, especially
with TLS 1.2 being close to dead and TLS 1.3 being
the common case.

Signed-off-by: default avatarJakub Kicinski <kuba@kernel.org>
Signed-off-by: default avatarDavid S. Miller <davem@davemloft.net>
parent f7d45f4b
Loading
Loading
Loading
Loading
+10 −10
Original line number Diff line number Diff line
@@ -1482,6 +1482,11 @@ static int decrypt_internal(struct sock *sk, struct sk_buff *skb,
	}

	/* Prepare IV */
	if (prot->version == TLS_1_3_VERSION ||
	    prot->cipher_type == TLS_CIPHER_CHACHA20_POLY1305) {
		memcpy(iv + iv_offset, tls_ctx->rx.iv,
		       prot->iv_size + prot->salt_size);
	} else {
		err = skb_copy_bits(skb, rxm->offset + TLS_HEADER_SIZE,
				    iv + iv_offset + prot->salt_size,
				    prot->iv_size);
@@ -1489,13 +1494,8 @@ static int decrypt_internal(struct sock *sk, struct sk_buff *skb,
			kfree(mem);
			return err;
		}
	if (prot->version == TLS_1_3_VERSION ||
	    prot->cipher_type == TLS_CIPHER_CHACHA20_POLY1305)
		memcpy(iv + iv_offset, tls_ctx->rx.iv,
		       prot->iv_size + prot->salt_size);
	else
		memcpy(iv + iv_offset, tls_ctx->rx.iv, prot->salt_size);

	}
	xor_iv_with_seq(prot, iv + iv_offset, tls_ctx->rx.rec_seq);

	/* Prepare AAD */