symmetric_cipher.c

/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */
#include <aws/cal/private/symmetric_cipher_priv.h>
#include <aws/cal/symmetric_cipher.h>
#include <aws/common/device_random.h>

#include <limits.h> /* INT_MAX */
#include <stdlib.h> /* abort() */

#ifndef BYO_CRYPTO
extern struct aws_symmetric_cipher *aws_aes_cbc_256_new_impl(
    struct aws_allocator *allocator,
    const struct aws_byte_cursor *key,
    const struct aws_byte_cursor *iv);

extern struct aws_symmetric_cipher *aws_aes_ctr_256_new_impl(
    struct aws_allocator *allocator,
    const struct aws_byte_cursor *key,
    const struct aws_byte_cursor *iv);

extern struct aws_symmetric_cipher *aws_aes_gcm_256_new_impl(
    struct aws_allocator *allocator,
    const struct aws_byte_cursor *key,
    const struct aws_byte_cursor *iv,
    const struct aws_byte_cursor *aad,
    const struct aws_byte_cursor *decryption_tag);

extern struct aws_symmetric_cipher *aws_aes_keywrap_256_new_impl(
    struct aws_allocator *allocator,
    const struct aws_byte_cursor *key);

#else /* BYO_CRYPTO */

struct aws_symmetric_cipher *aws_aes_cbc_256_new_impl(
    struct aws_allocator *allocator,
    const struct aws_byte_cursor *key,
    const struct aws_byte_cursor *iv) {
    (void)allocator;
    (void)key;
    (void)iv;
    abort();
}

struct aws_symmetric_cipher *aws_aes_ctr_256_new_impl(
    struct aws_allocator *allocator,
    const struct aws_byte_cursor *key,
    const struct aws_byte_cursor *iv) {
    (void)allocator;
    (void)key;
    (void)iv;
    abort();
}

struct aws_symmetric_cipher *aws_aes_gcm_256_new_impl(
    struct aws_allocator *allocator,
    const struct aws_byte_cursor *key,
    const struct aws_byte_cursor *iv,
    const struct aws_byte_cursor *aad,
    const struct aws_byte_cursor *decryption_tag) {
    (void)allocator;
    (void)key;
    (void)iv;
    (void)aad;
    (void)decryption_tag;
    abort();
}

struct aws_symmetric_cipher *aws_aes_keywrap_256_new_impl(
    struct aws_allocator *allocator,
    const struct aws_byte_cursor *key) {
    (void)allocator;
    (void)key;
    abort();
}

#endif /* BYO_CRYPTO */
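
/*
 * Default constructors for each supported mode. Dispatching through these
 * function pointers keeps the public entry points below independent of the
 * platform implementation that was linked in; in BYO_CRYPTO builds they point
 * at the aborting stubs above, and a real implementation is expected to be
 * installed elsewhere (not shown in this file).
 */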

static aws_aes_cbc_256_new_fn *s_aes_cbc_new_fn = aws_aes_cbc_256_new_impl;
static aws_aes_ctr_256_new_fn *s_aes_ctr_new_fn = aws_aes_ctr_256_new_impl;
static aws_aes_gcm_256_new_fn *s_aes_gcm_new_fn = aws_aes_gcm_256_new_impl;
static aws_aes_keywrap_256_new_fn *s_aes_keywrap_new_fn = aws_aes_keywrap_256_new_impl;

static int s_check_input_size_limits(const struct aws_symmetric_cipher *cipher, const struct aws_byte_cursor *input) {
    /* libcrypto uses int, not size_t, so this is the limit.
     * For simplicity, enforce the same rules on all platforms. */
    return input->len <= INT_MAX - cipher->block_size ? AWS_OP_SUCCESS
                                                      : aws_raise_error(AWS_ERROR_CAL_BUFFER_TOO_LARGE_FOR_ALGORITHM);
}

static int s_validate_key_materials(
    const struct aws_byte_cursor *key,
    size_t expected_key_size,
    const struct aws_byte_cursor *iv,
    size_t expected_iv_size) {
    if (key && key->len != expected_key_size) {
        return aws_raise_error(AWS_ERROR_CAL_INVALID_KEY_LENGTH_FOR_ALGORITHM);
    }

    if (iv && iv->len != expected_iv_size) {
        return aws_raise_error(AWS_ERROR_CAL_INVALID_CIPHER_MATERIAL_SIZE_FOR_ALGORITHM);
    }

    return AWS_OP_SUCCESS;
}
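
/*
 * Public constructors. Each validates the caller-supplied key/IV lengths for
 * AES-256 before handing off to the selected implementation; on a size
 * mismatch the constructor raises an error and returns NULL.
 */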

struct aws_symmetric_cipher *aws_aes_cbc_256_new(
    struct aws_allocator *allocator,
    const struct aws_byte_cursor *key,
    const struct aws_byte_cursor *iv) {
    if (s_validate_key_materials(key, AWS_AES_256_KEY_BYTE_LEN, iv, AWS_AES_256_CIPHER_BLOCK_SIZE) != AWS_OP_SUCCESS) {
        return NULL;
    }
    return s_aes_cbc_new_fn(allocator, key, iv);
}

struct aws_symmetric_cipher *aws_aes_ctr_256_new(
    struct aws_allocator *allocator,
    const struct aws_byte_cursor *key,
    const struct aws_byte_cursor *iv) {
    if (s_validate_key_materials(key, AWS_AES_256_KEY_BYTE_LEN, iv, AWS_AES_256_CIPHER_BLOCK_SIZE) != AWS_OP_SUCCESS) {
        return NULL;
    }
    return s_aes_ctr_new_fn(allocator, key, iv);
}

struct aws_symmetric_cipher *aws_aes_gcm_256_new(
    struct aws_allocator *allocator,
    const struct aws_byte_cursor *key,
    const struct aws_byte_cursor *iv,
    const struct aws_byte_cursor *aad,
    const struct aws_byte_cursor *decryption_tag) {
    /* GCM expects a 12-byte IV/nonce: the 16-byte block size minus the 4-byte counter. */
    if (s_validate_key_materials(key, AWS_AES_256_KEY_BYTE_LEN, iv, AWS_AES_256_CIPHER_BLOCK_SIZE - sizeof(uint32_t)) !=
        AWS_OP_SUCCESS) {
        return NULL;
    }
    return s_aes_gcm_new_fn(allocator, key, iv, aad, decryption_tag);
}

struct aws_symmetric_cipher *aws_aes_keywrap_256_new(
    struct aws_allocator *allocator,
    const struct aws_byte_cursor *key) {
    if (s_validate_key_materials(key, AWS_AES_256_KEY_BYTE_LEN, NULL, 0) != AWS_OP_SUCCESS) {
        return NULL;
    }
    return s_aes_keywrap_new_fn(allocator, key);
}
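
/*
 * Usage sketch (illustrative only, not part of this file): one-shot AES-256-CBC
 * encryption with the constructors above. The helper name, the up-front reserve
 * size, and the assumption that `key` and `iv` already hold 32 and 16 bytes of
 * material are all made up for the example; error handling is reduced to early
 * returns.
 *
 *     static int s_example_cbc_encrypt(
 *         struct aws_allocator *allocator,
 *         struct aws_byte_cursor key,        // AWS_AES_256_KEY_BYTE_LEN (32) bytes
 *         struct aws_byte_cursor iv,         // AWS_AES_256_CIPHER_BLOCK_SIZE (16) bytes
 *         struct aws_byte_cursor plaintext,
 *         struct aws_byte_buf *ciphertext) { // caller cleans up on success
 *
 *         struct aws_symmetric_cipher *cipher = aws_aes_cbc_256_new(allocator, &key, &iv);
 *         if (cipher == NULL) {
 *             return AWS_OP_ERR;
 *         }
 *
 *         // Reserve room for the input plus one block of padding, then encrypt and finalize.
 *         int result = aws_byte_buf_init(ciphertext, allocator, plaintext.len + AWS_AES_256_CIPHER_BLOCK_SIZE);
 *         if (result == AWS_OP_SUCCESS) {
 *             result = aws_symmetric_cipher_encrypt(cipher, plaintext, ciphertext);
 *         }
 *         if (result == AWS_OP_SUCCESS) {
 *             result = aws_symmetric_cipher_finalize_encryption(cipher, ciphertext);
 *         }
 *
 *         aws_symmetric_cipher_destroy(cipher);
 *         if (result != AWS_OP_SUCCESS) {
 *             aws_byte_buf_clean_up(ciphertext);
 *         }
 *         return result;
 *     }
 */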

void aws_symmetric_cipher_destroy(struct aws_symmetric_cipher *cipher) {
    if (cipher) {
        cipher->vtable->destroy(cipher);
    }
}
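
/*
 * Streaming entry points. A cipher stays usable while its internal `good` flag
 * is set: input-size limits are checked first, and finalizing either direction
 * clears the flag so further calls fail with AWS_ERROR_INVALID_STATE until
 * aws_symmetric_cipher_reset() succeeds.
 */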

int aws_symmetric_cipher_encrypt(
    struct aws_symmetric_cipher *cipher,
    struct aws_byte_cursor to_encrypt,
    struct aws_byte_buf *out) {
    if (AWS_UNLIKELY(s_check_input_size_limits(cipher, &to_encrypt) != AWS_OP_SUCCESS)) {
        return AWS_OP_ERR;
    }

    if (cipher->good) {
        return cipher->vtable->encrypt(cipher, to_encrypt, out);
    }

    return aws_raise_error(AWS_ERROR_INVALID_STATE);
}

int aws_symmetric_cipher_decrypt(
    struct aws_symmetric_cipher *cipher,
    struct aws_byte_cursor to_decrypt,
    struct aws_byte_buf *out) {
    if (AWS_UNLIKELY(s_check_input_size_limits(cipher, &to_decrypt) != AWS_OP_SUCCESS)) {
        return AWS_OP_ERR;
    }

    if (cipher->good) {
        return cipher->vtable->decrypt(cipher, to_decrypt, out);
    }

    return aws_raise_error(AWS_ERROR_INVALID_STATE);
}

int aws_symmetric_cipher_finalize_encryption(struct aws_symmetric_cipher *cipher, struct aws_byte_buf *out) {
    if (cipher->good) {
        int ret_val = cipher->vtable->finalize_encryption(cipher, out);
        cipher->good = false;
        return ret_val;
    }

    return aws_raise_error(AWS_ERROR_INVALID_STATE);
}

int aws_symmetric_cipher_finalize_decryption(struct aws_symmetric_cipher *cipher, struct aws_byte_buf *out) {
    if (cipher->good) {
        int ret_val = cipher->vtable->finalize_decryption(cipher, out);
        cipher->good = false;
        return ret_val;
    }

    return aws_raise_error(AWS_ERROR_INVALID_STATE);
}

int aws_symmetric_cipher_reset(struct aws_symmetric_cipher *cipher) {
    int ret_val = cipher->vtable->reset(cipher);
    if (ret_val == AWS_OP_SUCCESS) {
        cipher->good = true;
    }

    return ret_val;
}

struct aws_byte_cursor aws_symmetric_cipher_get_tag(const struct aws_symmetric_cipher *cipher) {
    return aws_byte_cursor_from_buf(&cipher->tag);
}

struct aws_byte_cursor aws_symmetric_cipher_get_initialization_vector(const struct aws_symmetric_cipher *cipher) {
    return aws_byte_cursor_from_buf(&cipher->iv);
}

struct aws_byte_cursor aws_symmetric_cipher_get_key(const struct aws_symmetric_cipher *cipher) {
    return aws_byte_cursor_from_buf(&cipher->key);
}

bool aws_symmetric_cipher_is_good(const struct aws_symmetric_cipher *cipher) {
    return cipher->good;
}
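
/*
 * Random material helpers. For counter (CTR) mode IVs, the layout produced is
 * `len_bytes - 4` random bytes followed by a 4-byte big-endian counter
 * initialized to 1. The bytes are appended to `out`, so the caller should pass
 * a buffer that already has at least `len_bytes` of spare capacity.
 */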

void aws_symmetric_cipher_generate_initialization_vector(
    size_t len_bytes,
    bool is_counter_mode,
    struct aws_byte_buf *out) {
    size_t counter_len = is_counter_mode ? sizeof(uint32_t) : 0;
    AWS_ASSERT(len_bytes > counter_len);
    size_t rand_len = len_bytes - counter_len;

    AWS_FATAL_ASSERT(aws_device_random_buffer_append(out, rand_len) == AWS_OP_SUCCESS);

    if (is_counter_mode) {
        /* put counter at the end, initialized to 1 */
        aws_byte_buf_write_be32(out, 1);
    }
}

void aws_symmetric_cipher_generate_key(size_t key_len_bytes, struct aws_byte_buf *out) {
    AWS_FATAL_ASSERT(aws_device_random_buffer_append(out, key_len_bytes) == AWS_OP_SUCCESS);
}
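
/*
 * Generation sketch (illustrative only, not part of this file): producing fresh
 * key and IV material for a CTR-mode cipher with the helpers above. The variable
 * names and the choice of aws_default_allocator() are assumptions made for the
 * example.
 *
 *     // Assumes aws_cal_library_init() has already been called.
 *     struct aws_allocator *allocator = aws_default_allocator();
 *     struct aws_byte_buf key_buf;
 *     struct aws_byte_buf iv_buf;
 *
 *     // Buffers need enough capacity up front; the helpers append into them.
 *     aws_byte_buf_init(&key_buf, allocator, AWS_AES_256_KEY_BYTE_LEN);
 *     aws_byte_buf_init(&iv_buf, allocator, AWS_AES_256_CIPHER_BLOCK_SIZE);
 *
 *     aws_symmetric_cipher_generate_key(AWS_AES_256_KEY_BYTE_LEN, &key_buf);
 *     // 12 random bytes followed by a big-endian counter initialized to 1.
 *     aws_symmetric_cipher_generate_initialization_vector(AWS_AES_256_CIPHER_BLOCK_SIZE, true, &iv_buf);
 *
 *     struct aws_byte_cursor key = aws_byte_cursor_from_buf(&key_buf);
 *     struct aws_byte_cursor iv = aws_byte_cursor_from_buf(&iv_buf);
 *     struct aws_symmetric_cipher *cipher = aws_aes_ctr_256_new(allocator, &key, &iv);
 */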

int aws_symmetric_cipher_try_ensure_sufficient_buffer_space(struct aws_byte_buf *buf, size_t size) {
    if (buf->capacity - buf->len < size) {
        return aws_byte_buf_reserve_relative(buf, size);
    }

    return AWS_OP_SUCCESS;
}