Diffstat (limited to 'security/integrity/ima/ima_crypto.c')
-rw-r--r--  security/integrity/ima/ima_crypto.c  120
1 file changed, 119 insertions(+), 1 deletion(-)
diff --git a/security/integrity/ima/ima_crypto.c b/security/integrity/ima/ima_crypto.c
index 6eb62936c672..38f2ed830dd6 100644
--- a/security/integrity/ima/ima_crypto.c
+++ b/security/integrity/ima/ima_crypto.c
@@ -24,7 +24,7 @@
 #include <linux/err.h>
 #include <linux/slab.h>
 #include <crypto/hash.h>
-#include <crypto/hash_info.h>
+
 #include "ima.h"
 
 struct ahash_completion {
@@ -519,6 +519,124 @@ int ima_calc_field_array_hash(struct ima_field_data *field_data,
 	return rc;
 }
 
+static int calc_buffer_ahash_atfm(const void *buf, loff_t len,
+				   struct ima_digest_data *hash,
+				   struct crypto_ahash *tfm)
+{
+	struct ahash_request *req;
+	struct scatterlist sg;
+	struct ahash_completion res;
+	int rc, ahash_rc = 0;
+
+	hash->length = crypto_ahash_digestsize(tfm);
+
+	req = ahash_request_alloc(tfm, GFP_KERNEL);
+	if (!req)
+		return -ENOMEM;
+
+	init_completion(&res.completion);
+	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
+				   CRYPTO_TFM_REQ_MAY_SLEEP,
+				   ahash_complete, &res);
+
+	rc = ahash_wait(crypto_ahash_init(req), &res);
+	if (rc)
+		goto out;
+
+	sg_init_one(&sg, buf, len);
+	ahash_request_set_crypt(req, &sg, NULL, len);
+
+	ahash_rc = crypto_ahash_update(req);
+
+	/* wait for the update request to complete */
+	rc = ahash_wait(ahash_rc, &res);
+	if (!rc) {
+		ahash_request_set_crypt(req, NULL, hash->digest, 0);
+		rc = ahash_wait(crypto_ahash_final(req), &res);
+	}
+out:
+	ahash_request_free(req);
+	return rc;
+}
+
+static int calc_buffer_ahash(const void *buf, loff_t len,
+			      struct ima_digest_data *hash)
+{
+	struct crypto_ahash *tfm;
+	int rc;
+
+	tfm = ima_alloc_atfm(hash->algo);
+	if (IS_ERR(tfm))
+		return PTR_ERR(tfm);
+
+	rc = calc_buffer_ahash_atfm(buf, len, hash, tfm);
+
+	ima_free_atfm(tfm);
+
+	return rc;
+}
+
+static int calc_buffer_shash_tfm(const void *buf, loff_t size,
+				 struct ima_digest_data *hash,
+				 struct crypto_shash *tfm)
+{
+	SHASH_DESC_ON_STACK(shash, tfm);
+	unsigned int len;
+	int rc;
+
+	shash->tfm = tfm;
+	shash->flags = 0;
+
+	hash->length = crypto_shash_digestsize(tfm);
+
+	rc = crypto_shash_init(shash);
+	if (rc != 0)
+		return rc;
+
+	while (size) {
+		len = size < PAGE_SIZE ? size : PAGE_SIZE;
+		rc = crypto_shash_update(shash, buf, len);
+		if (rc)
+			break;
+		buf += len;
+		size -= len;
+	}
+
+	if (!rc)
+		rc = crypto_shash_final(shash, hash->digest);
+	return rc;
+}
+
+static int calc_buffer_shash(const void *buf, loff_t len,
+			     struct ima_digest_data *hash)
+{
+	struct crypto_shash *tfm;
+	int rc;
+
+	tfm = ima_alloc_tfm(hash->algo);
+	if (IS_ERR(tfm))
+		return PTR_ERR(tfm);
+
+	rc = calc_buffer_shash_tfm(buf, len, hash, tfm);
+
+	ima_free_tfm(tfm);
+	return rc;
+}
+
+int ima_calc_buffer_hash(const void *buf, loff_t len,
+			 struct ima_digest_data *hash)
+{
+	int rc;
+
+	if (ima_ahash_minsize && len >= ima_ahash_minsize) {
+		rc = calc_buffer_ahash(buf, len, hash);
+		if (!rc)
+			return 0;
+	}
+
+	return calc_buffer_shash(buf, len, hash);
+}
+
 static void __init ima_pcrread(int idx, u8 *pcr)
 {
 	if (!ima_used_chip)
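
For context, a caller of the new ima_calc_buffer_hash() is expected to pass a struct ima_digest_data with storage for the digest placed directly behind it, since the digest[] member is a flexible array that the calculation fills in. The sketch below is a minimal, hypothetical caller and is not part of this patch; it assumes the usual IMA definitions (struct ima_digest_data, IMA_MAX_DIGEST_SIZE) are available via ima.h, and the helper name example_measure_buffer() is invented for illustration.

/*
 * Illustrative sketch only, not part of this patch: hash an in-memory
 * buffer with ima_calc_buffer_hash().  The wrapper struct reserves
 * IMA_MAX_DIGEST_SIZE bytes after the header because struct
 * ima_digest_data (assumed from ima.h/integrity.h) ends in a flexible
 * digest[] array.
 */
#include <crypto/hash_info.h>	/* HASH_ALGO_* identifiers */

#include "ima.h"

static int example_measure_buffer(const void *buf, loff_t len)
{
	struct {
		struct ima_digest_data hdr;
		char digest[IMA_MAX_DIGEST_SIZE];
	} hash;
	int rc;

	hash.hdr.algo = HASH_ALGO_SHA1;	/* any algorithm ima_alloc_tfm() supports */

	rc = ima_calc_buffer_hash(buf, len, &hash.hdr);
	if (rc)
		return rc;

	/* hash.hdr.length and hash.hdr.digest now hold the computed digest */
	return 0;
}

As the dispatcher in the patch shows, such a call only attempts the asynchronous ahash path when ima_ahash_minsize is set and the buffer is at least that large; otherwise, or on any ahash failure, it falls back to the synchronous shash implementation.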