sph_sha2.c

/* $Id: sha2.c 227 2010-06-16 17:28:38Z tp $ */
/*
 * SHA-224 / SHA-256 implementation.
 *
 * ==========================(LICENSE BEGIN)============================
 *
 * Copyright (c) 2007-2010  Projet RNRT SAPHIR
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * ===========================(LICENSE END)=============================
 *
 * @author   Thomas Pornin <thomas.pornin@cryptolog.com>
 */

#include <stddef.h>
#include <string.h>

#include "sph_sha2.h"

#if SPH_SMALL_FOOTPRINT && !defined SPH_SMALL_FOOTPRINT_SHA2
#define SPH_SMALL_FOOTPRINT_SHA2   1
#endif

#define CH(X, Y, Z)    ((((Y) ^ (Z)) & (X)) ^ (Z))
#define MAJ(X, Y, Z)   (((Y) & (Z)) | (((Y) | (Z)) & (X)))

#define ROTR   SPH_ROTR32

#define BSG2_0(x)   (ROTR(x, 2) ^ ROTR(x, 13) ^ ROTR(x, 22))
#define BSG2_1(x)   (ROTR(x, 6) ^ ROTR(x, 11) ^ ROTR(x, 25))
#define SSG2_0(x)   (ROTR(x, 7) ^ ROTR(x, 18) ^ SPH_T32((x) >> 3))
#define SSG2_1(x)   (ROTR(x, 17) ^ ROTR(x, 19) ^ SPH_T32((x) >> 10))
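
/*
 * Note (added commentary): CH and MAJ above are the FIPS 180 "choice"
 * and "majority" functions, rewritten to save one boolean operation
 * each. The CH form is equivalent to (X & Y) ^ (~X & Z), and the MAJ
 * form to (X & Y) ^ (X & Z) ^ (Y & Z). BSG2_0/BSG2_1 and SSG2_0/SSG2_1
 * are the "big sigma" and "small sigma" functions from the SHA-256
 * specification.
 */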

static const sph_u32 H224[8] = {
    SPH_C32(0xC1059ED8), SPH_C32(0x367CD507), SPH_C32(0x3070DD17),
    SPH_C32(0xF70E5939), SPH_C32(0xFFC00B31), SPH_C32(0x68581511),
    SPH_C32(0x64F98FA7), SPH_C32(0xBEFA4FA4)
};

static const sph_u32 H256[8] = {
    SPH_C32(0x6A09E667), SPH_C32(0xBB67AE85), SPH_C32(0x3C6EF372),
    SPH_C32(0xA54FF53A), SPH_C32(0x510E527F), SPH_C32(0x9B05688C),
    SPH_C32(0x1F83D9AB), SPH_C32(0x5BE0CD19)
};
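
/*
 * Note (added commentary): H224 and H256 are the initial hash values
 * from FIPS 180. For SHA-256 they are the first 32 bits of the
 * fractional parts of the square roots of the first eight primes;
 * SHA-224 uses the second 32 bits of the square roots of the ninth
 * through sixteenth primes, so that a truncated SHA-256 output cannot
 * be mistaken for a genuine SHA-224 output.
 */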

/*
 * The SHA2_ROUND_BODY macro defines the body of a SHA-224 / SHA-256
 * compression function implementation. The "in" parameter should
 * evaluate, when applied to a numerical input parameter from 0 to 15,
 * to an expression which yields the corresponding input block word.
 * The "r" parameter should evaluate to an array or pointer expression
 * designating the array of 8 words which contains the input and output
 * of the compression function.
 */
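
/*
 * For instance, a sketch of typical use (mirroring sha2_round()
 * further below; "buf" and "state" are illustrative names):
 *
 *     #define IN(x)   sph_dec32be_aligned(buf + 4 * (x))
 *     SHA2_ROUND_BODY(IN, state);
 *     #undef IN
 *
 * where "buf" points to a 64-byte message block and "state" is an
 * array of 8 sph_u32 words.
 */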

#if SPH_SMALL_FOOTPRINT_SHA2

static const sph_u32 K[64] = {
    SPH_C32(0x428A2F98), SPH_C32(0x71374491),
    SPH_C32(0xB5C0FBCF), SPH_C32(0xE9B5DBA5),
    SPH_C32(0x3956C25B), SPH_C32(0x59F111F1),
    SPH_C32(0x923F82A4), SPH_C32(0xAB1C5ED5),
    SPH_C32(0xD807AA98), SPH_C32(0x12835B01),
    SPH_C32(0x243185BE), SPH_C32(0x550C7DC3),
    SPH_C32(0x72BE5D74), SPH_C32(0x80DEB1FE),
    SPH_C32(0x9BDC06A7), SPH_C32(0xC19BF174),
    SPH_C32(0xE49B69C1), SPH_C32(0xEFBE4786),
    SPH_C32(0x0FC19DC6), SPH_C32(0x240CA1CC),
    SPH_C32(0x2DE92C6F), SPH_C32(0x4A7484AA),
    SPH_C32(0x5CB0A9DC), SPH_C32(0x76F988DA),
    SPH_C32(0x983E5152), SPH_C32(0xA831C66D),
    SPH_C32(0xB00327C8), SPH_C32(0xBF597FC7),
    SPH_C32(0xC6E00BF3), SPH_C32(0xD5A79147),
    SPH_C32(0x06CA6351), SPH_C32(0x14292967),
    SPH_C32(0x27B70A85), SPH_C32(0x2E1B2138),
    SPH_C32(0x4D2C6DFC), SPH_C32(0x53380D13),
    SPH_C32(0x650A7354), SPH_C32(0x766A0ABB),
    SPH_C32(0x81C2C92E), SPH_C32(0x92722C85),
    SPH_C32(0xA2BFE8A1), SPH_C32(0xA81A664B),
    SPH_C32(0xC24B8B70), SPH_C32(0xC76C51A3),
    SPH_C32(0xD192E819), SPH_C32(0xD6990624),
    SPH_C32(0xF40E3585), SPH_C32(0x106AA070),
    SPH_C32(0x19A4C116), SPH_C32(0x1E376C08),
    SPH_C32(0x2748774C), SPH_C32(0x34B0BCB5),
    SPH_C32(0x391C0CB3), SPH_C32(0x4ED8AA4A),
    SPH_C32(0x5B9CCA4F), SPH_C32(0x682E6FF3),
    SPH_C32(0x748F82EE), SPH_C32(0x78A5636F),
    SPH_C32(0x84C87814), SPH_C32(0x8CC70208),
    SPH_C32(0x90BEFFFA), SPH_C32(0xA4506CEB),
    SPH_C32(0xBEF9A3F7), SPH_C32(0xC67178F2)
};
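
/*
 * Note (added commentary): K holds the 64 SHA-256 round constants,
 * i.e. the first 32 bits of the fractional parts of the cube roots of
 * the first 64 primes.
 */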

#define SHA2_MEXP1(in, pc)   do { \
        W[pc] = in(pc); \
    } while (0)

#define SHA2_MEXP2(in, pc)   do { \
        W[(pc) & 0x0F] = SPH_T32(SSG2_1(W[((pc) - 2) & 0x0F]) \
            + W[((pc) - 7) & 0x0F] \
            + SSG2_0(W[((pc) - 15) & 0x0F]) + W[(pc) & 0x0F]); \
    } while (0)
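
/*
 * Note (added commentary): SHA2_MEXP2 computes the SHA-256 message
 * schedule recurrence
 *     W[t] = SSG2_1(W[t-2]) + W[t-7] + SSG2_0(W[t-15]) + W[t-16]
 * in place over a 16-word circular buffer: every index is reduced
 * modulo 16, so W[(pc) & 0x0F] holds W[t-16] on entry and W[t] on
 * exit.
 */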

#define SHA2_STEPn(n, a, b, c, d, e, f, g, h, in, pc)   do { \
        sph_u32 t1, t2; \
        SHA2_MEXP ## n(in, pc); \
        t1 = SPH_T32(h + BSG2_1(e) + CH(e, f, g) \
            + K[pcount + (pc)] + W[(pc) & 0x0F]); \
        t2 = SPH_T32(BSG2_0(a) + MAJ(a, b, c)); \
        d = SPH_T32(d + t1); \
        h = SPH_T32(t1 + t2); \
    } while (0)

#define SHA2_STEP1(a, b, c, d, e, f, g, h, in, pc) \
    SHA2_STEPn(1, a, b, c, d, e, f, g, h, in, pc)
#define SHA2_STEP2(a, b, c, d, e, f, g, h, in, pc) \
    SHA2_STEPn(2, a, b, c, d, e, f, g, h, in, pc)
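
/*
 * Note (added commentary): in the round body below, the eight state
 * words are never physically rotated. Successive SHA2_STEP*
 * invocations simply pass A..H to the macro in a rotated order, so
 * each step only needs to update its "d" and "h" arguments in place
 * and no register moves are required.
 */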

#define SHA2_ROUND_BODY(in, r)   do { \
        sph_u32 A, B, C, D, E, F, G, H; \
        sph_u32 W[16]; \
        unsigned pcount; \
        \
        A = (r)[0]; \
        B = (r)[1]; \
        C = (r)[2]; \
        D = (r)[3]; \
        E = (r)[4]; \
        F = (r)[5]; \
        G = (r)[6]; \
        H = (r)[7]; \
        pcount = 0; \
        SHA2_STEP1(A, B, C, D, E, F, G, H, in, 0); \
        SHA2_STEP1(H, A, B, C, D, E, F, G, in, 1); \
        SHA2_STEP1(G, H, A, B, C, D, E, F, in, 2); \
        SHA2_STEP1(F, G, H, A, B, C, D, E, in, 3); \
        SHA2_STEP1(E, F, G, H, A, B, C, D, in, 4); \
        SHA2_STEP1(D, E, F, G, H, A, B, C, in, 5); \
        SHA2_STEP1(C, D, E, F, G, H, A, B, in, 6); \
        SHA2_STEP1(B, C, D, E, F, G, H, A, in, 7); \
        SHA2_STEP1(A, B, C, D, E, F, G, H, in, 8); \
        SHA2_STEP1(H, A, B, C, D, E, F, G, in, 9); \
        SHA2_STEP1(G, H, A, B, C, D, E, F, in, 10); \
        SHA2_STEP1(F, G, H, A, B, C, D, E, in, 11); \
        SHA2_STEP1(E, F, G, H, A, B, C, D, in, 12); \
        SHA2_STEP1(D, E, F, G, H, A, B, C, in, 13); \
        SHA2_STEP1(C, D, E, F, G, H, A, B, in, 14); \
        SHA2_STEP1(B, C, D, E, F, G, H, A, in, 15); \
        for (pcount = 16; pcount < 64; pcount += 16) { \
            SHA2_STEP2(A, B, C, D, E, F, G, H, in, 0); \
            SHA2_STEP2(H, A, B, C, D, E, F, G, in, 1); \
            SHA2_STEP2(G, H, A, B, C, D, E, F, in, 2); \
            SHA2_STEP2(F, G, H, A, B, C, D, E, in, 3); \
            SHA2_STEP2(E, F, G, H, A, B, C, D, in, 4); \
            SHA2_STEP2(D, E, F, G, H, A, B, C, in, 5); \
            SHA2_STEP2(C, D, E, F, G, H, A, B, in, 6); \
            SHA2_STEP2(B, C, D, E, F, G, H, A, in, 7); \
            SHA2_STEP2(A, B, C, D, E, F, G, H, in, 8); \
            SHA2_STEP2(H, A, B, C, D, E, F, G, in, 9); \
            SHA2_STEP2(G, H, A, B, C, D, E, F, in, 10); \
            SHA2_STEP2(F, G, H, A, B, C, D, E, in, 11); \
            SHA2_STEP2(E, F, G, H, A, B, C, D, in, 12); \
            SHA2_STEP2(D, E, F, G, H, A, B, C, in, 13); \
            SHA2_STEP2(C, D, E, F, G, H, A, B, in, 14); \
            SHA2_STEP2(B, C, D, E, F, G, H, A, in, 15); \
        } \
        (r)[0] = SPH_T32((r)[0] + A); \
        (r)[1] = SPH_T32((r)[1] + B); \
        (r)[2] = SPH_T32((r)[2] + C); \
        (r)[3] = SPH_T32((r)[3] + D); \
        (r)[4] = SPH_T32((r)[4] + E); \
        (r)[5] = SPH_T32((r)[5] + F); \
        (r)[6] = SPH_T32((r)[6] + G); \
        (r)[7] = SPH_T32((r)[7] + H); \
    } while (0)

#else
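
/*
 * Note (added commentary): fully unrolled variant. The sixteen most
 * recent message-schedule words are kept in the local variables
 * W00..W15 (a rolling 16-word window) and the round constants are
 * inlined, trading code size for speed.
 */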

#define SHA2_ROUND_BODY(in, r)   do { \
        sph_u32 A, B, C, D, E, F, G, H, T1, T2; \
        sph_u32 W00, W01, W02, W03, W04, W05, W06, W07; \
        sph_u32 W08, W09, W10, W11, W12, W13, W14, W15; \
        \
        A = (r)[0]; \
        B = (r)[1]; \
        C = (r)[2]; \
        D = (r)[3]; \
        E = (r)[4]; \
        F = (r)[5]; \
        G = (r)[6]; \
        H = (r)[7]; \
        W00 = in(0); \
        T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
            + SPH_C32(0x428A2F98) + W00); \
        T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
        D = SPH_T32(D + T1); \
        H = SPH_T32(T1 + T2); \
        W01 = in(1); \
        T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
            + SPH_C32(0x71374491) + W01); \
        T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
        C = SPH_T32(C + T1); \
        G = SPH_T32(T1 + T2); \
        W02 = in(2); \
        T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
            + SPH_C32(0xB5C0FBCF) + W02); \
        T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
        B = SPH_T32(B + T1); \
        F = SPH_T32(T1 + T2); \
        W03 = in(3); \
        T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
            + SPH_C32(0xE9B5DBA5) + W03); \
        T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
        A = SPH_T32(A + T1); \
        E = SPH_T32(T1 + T2); \
        W04 = in(4); \
        T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
            + SPH_C32(0x3956C25B) + W04); \
        T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
        H = SPH_T32(H + T1); \
        D = SPH_T32(T1 + T2); \
        W05 = in(5); \
        T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
            + SPH_C32(0x59F111F1) + W05); \
        T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
        G = SPH_T32(G + T1); \
        C = SPH_T32(T1 + T2); \
        W06 = in(6); \
        T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
            + SPH_C32(0x923F82A4) + W06); \
        T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
        F = SPH_T32(F + T1); \
        B = SPH_T32(T1 + T2); \
        W07 = in(7); \
        T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
            + SPH_C32(0xAB1C5ED5) + W07); \
        T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
        E = SPH_T32(E + T1); \
        A = SPH_T32(T1 + T2); \
        W08 = in(8); \
        T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
            + SPH_C32(0xD807AA98) + W08); \
        T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
        D = SPH_T32(D + T1); \
        H = SPH_T32(T1 + T2); \
        W09 = in(9); \
        T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
            + SPH_C32(0x12835B01) + W09); \
        T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
        C = SPH_T32(C + T1); \
        G = SPH_T32(T1 + T2); \
        W10 = in(10); \
        T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
            + SPH_C32(0x243185BE) + W10); \
        T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
        B = SPH_T32(B + T1); \
        F = SPH_T32(T1 + T2); \
        W11 = in(11); \
        T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
            + SPH_C32(0x550C7DC3) + W11); \
        T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
        A = SPH_T32(A + T1); \
        E = SPH_T32(T1 + T2); \
        W12 = in(12); \
        T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
            + SPH_C32(0x72BE5D74) + W12); \
        T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
        H = SPH_T32(H + T1); \
        D = SPH_T32(T1 + T2); \
        W13 = in(13); \
        T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
            + SPH_C32(0x80DEB1FE) + W13); \
        T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
        G = SPH_T32(G + T1); \
        C = SPH_T32(T1 + T2); \
        W14 = in(14); \
        T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
            + SPH_C32(0x9BDC06A7) + W14); \
        T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
        F = SPH_T32(F + T1); \
        B = SPH_T32(T1 + T2); \
        W15 = in(15); \
        T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
            + SPH_C32(0xC19BF174) + W15); \
        T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
        E = SPH_T32(E + T1); \
        A = SPH_T32(T1 + T2); \
        W00 = SPH_T32(SSG2_1(W14) + W09 + SSG2_0(W01) + W00); \
        T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
            + SPH_C32(0xE49B69C1) + W00); \
        T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
        D = SPH_T32(D + T1); \
        H = SPH_T32(T1 + T2); \
        W01 = SPH_T32(SSG2_1(W15) + W10 + SSG2_0(W02) + W01); \
        T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
            + SPH_C32(0xEFBE4786) + W01); \
        T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
        C = SPH_T32(C + T1); \
        G = SPH_T32(T1 + T2); \
        W02 = SPH_T32(SSG2_1(W00) + W11 + SSG2_0(W03) + W02); \
        T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
            + SPH_C32(0x0FC19DC6) + W02); \
        T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
        B = SPH_T32(B + T1); \
        F = SPH_T32(T1 + T2); \
        W03 = SPH_T32(SSG2_1(W01) + W12 + SSG2_0(W04) + W03); \
        T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
            + SPH_C32(0x240CA1CC) + W03); \
        T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
        A = SPH_T32(A + T1); \
        E = SPH_T32(T1 + T2); \
        W04 = SPH_T32(SSG2_1(W02) + W13 + SSG2_0(W05) + W04); \
        T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
            + SPH_C32(0x2DE92C6F) + W04); \
        T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
        H = SPH_T32(H + T1); \
        D = SPH_T32(T1 + T2); \
        W05 = SPH_T32(SSG2_1(W03) + W14 + SSG2_0(W06) + W05); \
        T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
            + SPH_C32(0x4A7484AA) + W05); \
        T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
        G = SPH_T32(G + T1); \
        C = SPH_T32(T1 + T2); \
        W06 = SPH_T32(SSG2_1(W04) + W15 + SSG2_0(W07) + W06); \
        T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
            + SPH_C32(0x5CB0A9DC) + W06); \
        T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
        F = SPH_T32(F + T1); \
        B = SPH_T32(T1 + T2); \
        W07 = SPH_T32(SSG2_1(W05) + W00 + SSG2_0(W08) + W07); \
        T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
            + SPH_C32(0x76F988DA) + W07); \
        T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
        E = SPH_T32(E + T1); \
        A = SPH_T32(T1 + T2); \
        W08 = SPH_T32(SSG2_1(W06) + W01 + SSG2_0(W09) + W08); \
        T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
            + SPH_C32(0x983E5152) + W08); \
        T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
        D = SPH_T32(D + T1); \
        H = SPH_T32(T1 + T2); \
        W09 = SPH_T32(SSG2_1(W07) + W02 + SSG2_0(W10) + W09); \
        T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
            + SPH_C32(0xA831C66D) + W09); \
        T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
        C = SPH_T32(C + T1); \
        G = SPH_T32(T1 + T2); \
        W10 = SPH_T32(SSG2_1(W08) + W03 + SSG2_0(W11) + W10); \
        T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
            + SPH_C32(0xB00327C8) + W10); \
        T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
        B = SPH_T32(B + T1); \
        F = SPH_T32(T1 + T2); \
        W11 = SPH_T32(SSG2_1(W09) + W04 + SSG2_0(W12) + W11); \
        T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
            + SPH_C32(0xBF597FC7) + W11); \
        T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
        A = SPH_T32(A + T1); \
        E = SPH_T32(T1 + T2); \
        W12 = SPH_T32(SSG2_1(W10) + W05 + SSG2_0(W13) + W12); \
        T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
            + SPH_C32(0xC6E00BF3) + W12); \
        T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
        H = SPH_T32(H + T1); \
        D = SPH_T32(T1 + T2); \
        W13 = SPH_T32(SSG2_1(W11) + W06 + SSG2_0(W14) + W13); \
        T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
            + SPH_C32(0xD5A79147) + W13); \
        T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
        G = SPH_T32(G + T1); \
        C = SPH_T32(T1 + T2); \
        W14 = SPH_T32(SSG2_1(W12) + W07 + SSG2_0(W15) + W14); \
        T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
            + SPH_C32(0x06CA6351) + W14); \
        T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
        F = SPH_T32(F + T1); \
        B = SPH_T32(T1 + T2); \
        W15 = SPH_T32(SSG2_1(W13) + W08 + SSG2_0(W00) + W15); \
        T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
            + SPH_C32(0x14292967) + W15); \
        T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
        E = SPH_T32(E + T1); \
        A = SPH_T32(T1 + T2); \
        W00 = SPH_T32(SSG2_1(W14) + W09 + SSG2_0(W01) + W00); \
        T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
            + SPH_C32(0x27B70A85) + W00); \
        T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
        D = SPH_T32(D + T1); \
        H = SPH_T32(T1 + T2); \
        W01 = SPH_T32(SSG2_1(W15) + W10 + SSG2_0(W02) + W01); \
        T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
            + SPH_C32(0x2E1B2138) + W01); \
        T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
        C = SPH_T32(C + T1); \
        G = SPH_T32(T1 + T2); \
        W02 = SPH_T32(SSG2_1(W00) + W11 + SSG2_0(W03) + W02); \
        T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
            + SPH_C32(0x4D2C6DFC) + W02); \
        T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
        B = SPH_T32(B + T1); \
        F = SPH_T32(T1 + T2); \
        W03 = SPH_T32(SSG2_1(W01) + W12 + SSG2_0(W04) + W03); \
        T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
            + SPH_C32(0x53380D13) + W03); \
        T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
        A = SPH_T32(A + T1); \
        E = SPH_T32(T1 + T2); \
        W04 = SPH_T32(SSG2_1(W02) + W13 + SSG2_0(W05) + W04); \
        T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
            + SPH_C32(0x650A7354) + W04); \
        T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
        H = SPH_T32(H + T1); \
        D = SPH_T32(T1 + T2); \
        W05 = SPH_T32(SSG2_1(W03) + W14 + SSG2_0(W06) + W05); \
        T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
            + SPH_C32(0x766A0ABB) + W05); \
        T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
        G = SPH_T32(G + T1); \
        C = SPH_T32(T1 + T2); \
        W06 = SPH_T32(SSG2_1(W04) + W15 + SSG2_0(W07) + W06); \
        T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
            + SPH_C32(0x81C2C92E) + W06); \
        T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
        F = SPH_T32(F + T1); \
        B = SPH_T32(T1 + T2); \
        W07 = SPH_T32(SSG2_1(W05) + W00 + SSG2_0(W08) + W07); \
        T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
            + SPH_C32(0x92722C85) + W07); \
        T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
        E = SPH_T32(E + T1); \
        A = SPH_T32(T1 + T2); \
        W08 = SPH_T32(SSG2_1(W06) + W01 + SSG2_0(W09) + W08); \
        T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
            + SPH_C32(0xA2BFE8A1) + W08); \
        T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
        D = SPH_T32(D + T1); \
        H = SPH_T32(T1 + T2); \
        W09 = SPH_T32(SSG2_1(W07) + W02 + SSG2_0(W10) + W09); \
        T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
            + SPH_C32(0xA81A664B) + W09); \
        T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
        C = SPH_T32(C + T1); \
        G = SPH_T32(T1 + T2); \
        W10 = SPH_T32(SSG2_1(W08) + W03 + SSG2_0(W11) + W10); \
        T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
            + SPH_C32(0xC24B8B70) + W10); \
        T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
        B = SPH_T32(B + T1); \
        F = SPH_T32(T1 + T2); \
        W11 = SPH_T32(SSG2_1(W09) + W04 + SSG2_0(W12) + W11); \
        T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
            + SPH_C32(0xC76C51A3) + W11); \
        T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
        A = SPH_T32(A + T1); \
        E = SPH_T32(T1 + T2); \
        W12 = SPH_T32(SSG2_1(W10) + W05 + SSG2_0(W13) + W12); \
        T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
            + SPH_C32(0xD192E819) + W12); \
        T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
        H = SPH_T32(H + T1); \
        D = SPH_T32(T1 + T2); \
        W13 = SPH_T32(SSG2_1(W11) + W06 + SSG2_0(W14) + W13); \
        T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
            + SPH_C32(0xD6990624) + W13); \
        T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
        G = SPH_T32(G + T1); \
        C = SPH_T32(T1 + T2); \
        W14 = SPH_T32(SSG2_1(W12) + W07 + SSG2_0(W15) + W14); \
        T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
            + SPH_C32(0xF40E3585) + W14); \
        T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
        F = SPH_T32(F + T1); \
        B = SPH_T32(T1 + T2); \
        W15 = SPH_T32(SSG2_1(W13) + W08 + SSG2_0(W00) + W15); \
        T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
            + SPH_C32(0x106AA070) + W15); \
        T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
        E = SPH_T32(E + T1); \
        A = SPH_T32(T1 + T2); \
        W00 = SPH_T32(SSG2_1(W14) + W09 + SSG2_0(W01) + W00); \
        T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
            + SPH_C32(0x19A4C116) + W00); \
        T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
        D = SPH_T32(D + T1); \
        H = SPH_T32(T1 + T2); \
        W01 = SPH_T32(SSG2_1(W15) + W10 + SSG2_0(W02) + W01); \
        T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
            + SPH_C32(0x1E376C08) + W01); \
        T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
        C = SPH_T32(C + T1); \
        G = SPH_T32(T1 + T2); \
        W02 = SPH_T32(SSG2_1(W00) + W11 + SSG2_0(W03) + W02); \
        T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
            + SPH_C32(0x2748774C) + W02); \
        T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
        B = SPH_T32(B + T1); \
        F = SPH_T32(T1 + T2); \
        W03 = SPH_T32(SSG2_1(W01) + W12 + SSG2_0(W04) + W03); \
        T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
            + SPH_C32(0x34B0BCB5) + W03); \
        T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
        A = SPH_T32(A + T1); \
        E = SPH_T32(T1 + T2); \
        W04 = SPH_T32(SSG2_1(W02) + W13 + SSG2_0(W05) + W04); \
        T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
            + SPH_C32(0x391C0CB3) + W04); \
        T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
        H = SPH_T32(H + T1); \
        D = SPH_T32(T1 + T2); \
        W05 = SPH_T32(SSG2_1(W03) + W14 + SSG2_0(W06) + W05); \
        T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
            + SPH_C32(0x4ED8AA4A) + W05); \
        T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
        G = SPH_T32(G + T1); \
        C = SPH_T32(T1 + T2); \
        W06 = SPH_T32(SSG2_1(W04) + W15 + SSG2_0(W07) + W06); \
        T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
            + SPH_C32(0x5B9CCA4F) + W06); \
        T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
        F = SPH_T32(F + T1); \
        B = SPH_T32(T1 + T2); \
        W07 = SPH_T32(SSG2_1(W05) + W00 + SSG2_0(W08) + W07); \
        T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
            + SPH_C32(0x682E6FF3) + W07); \
        T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
        E = SPH_T32(E + T1); \
        A = SPH_T32(T1 + T2); \
        W08 = SPH_T32(SSG2_1(W06) + W01 + SSG2_0(W09) + W08); \
        T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
            + SPH_C32(0x748F82EE) + W08); \
        T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
        D = SPH_T32(D + T1); \
        H = SPH_T32(T1 + T2); \
        W09 = SPH_T32(SSG2_1(W07) + W02 + SSG2_0(W10) + W09); \
        T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
            + SPH_C32(0x78A5636F) + W09); \
        T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
        C = SPH_T32(C + T1); \
        G = SPH_T32(T1 + T2); \
        W10 = SPH_T32(SSG2_1(W08) + W03 + SSG2_0(W11) + W10); \
        T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
            + SPH_C32(0x84C87814) + W10); \
        T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
        B = SPH_T32(B + T1); \
        F = SPH_T32(T1 + T2); \
        W11 = SPH_T32(SSG2_1(W09) + W04 + SSG2_0(W12) + W11); \
        T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
            + SPH_C32(0x8CC70208) + W11); \
        T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
        A = SPH_T32(A + T1); \
        E = SPH_T32(T1 + T2); \
        W12 = SPH_T32(SSG2_1(W10) + W05 + SSG2_0(W13) + W12); \
        T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
            + SPH_C32(0x90BEFFFA) + W12); \
        T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
        H = SPH_T32(H + T1); \
        D = SPH_T32(T1 + T2); \
        W13 = SPH_T32(SSG2_1(W11) + W06 + SSG2_0(W14) + W13); \
        T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
            + SPH_C32(0xA4506CEB) + W13); \
        T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
        G = SPH_T32(G + T1); \
        C = SPH_T32(T1 + T2); \
        W14 = SPH_T32(SSG2_1(W12) + W07 + SSG2_0(W15) + W14); \
        T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
            + SPH_C32(0xBEF9A3F7) + W14); \
        T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
        F = SPH_T32(F + T1); \
        B = SPH_T32(T1 + T2); \
        W15 = SPH_T32(SSG2_1(W13) + W08 + SSG2_0(W00) + W15); \
        T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
            + SPH_C32(0xC67178F2) + W15); \
        T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
        E = SPH_T32(E + T1); \
        A = SPH_T32(T1 + T2); \
        (r)[0] = SPH_T32((r)[0] + A); \
        (r)[1] = SPH_T32((r)[1] + B); \
        (r)[2] = SPH_T32((r)[2] + C); \
        (r)[3] = SPH_T32((r)[3] + D); \
        (r)[4] = SPH_T32((r)[4] + E); \
        (r)[5] = SPH_T32((r)[5] + F); \
        (r)[6] = SPH_T32((r)[6] + G); \
        (r)[7] = SPH_T32((r)[7] + H); \
    } while (0)

#endif

/*
 * One round of SHA-224 / SHA-256. The data must be aligned for 32-bit access.
 */
static void
sha2_round(const unsigned char *data, sph_u32 r[8])
{
#define SHA2_IN(x)   sph_dec32be_aligned(data + (4 * (x)))
    SHA2_ROUND_BODY(SHA2_IN, r);
#undef SHA2_IN
}
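
/*
 * Note (added commentary, an assumption about md_helper.c internals):
 * within this file, sha2_round() is only reached through the code
 * generated by md_helper.c below, which is expected to hand it the
 * context's internal block buffer; that buffer is suitably aligned,
 * so the 32-bit alignment requirement stated above is met.
 */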

/* see sph_sha2.h */
void
sph_sha224_init(void *cc)
{
    sph_sha224_context *sc;

    sc = cc;
    memcpy(sc->val, H224, sizeof H224);
#if SPH_64
    sc->count = 0;
#else
    sc->count_high = sc->count_low = 0;
#endif
}

/* see sph_sha2.h */
void
sph_sha256_init(void *cc)
{
    sph_sha256_context *sc;

    sc = cc;
    memcpy(sc->val, H256, sizeof H256);
#if SPH_64
    sc->count = 0;
#else
    sc->count_high = sc->count_low = 0;
#endif
}

#define RFUN   sha2_round
#define HASH   sha224
#define BE32   1
#include "md_helper.c"
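
/*
 * Note (added commentary): including md_helper.c with RFUN, HASH and
 * BE32 defined as above is expected to generate the streaming update
 * function sph_sha224() together with the static helpers
 * sha224_close() and sha224_addbits_and_close(), which pad the
 * message, run the final compression with RFUN, and write the
 * requested number of big-endian 32-bit state words to the output.
 */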

/* see sph_sha2.h */
void
sph_sha224_close(void *cc, void *dst)
{
    sha224_close(cc, dst, 7);
    sph_sha224_init(cc);
}

/* see sph_sha2.h */
void
sph_sha224_addbits_and_close(void *cc, unsigned ub, unsigned n, void *dst)
{
    sha224_addbits_and_close(cc, ub, n, dst, 7);
    sph_sha224_init(cc);
}
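
/*
 * Note (added commentary): SHA-224 and SHA-256 share the same
 * compression function and padding; they differ only in their initial
 * value and in how many state words are emitted. The sha256 functions
 * below can therefore reuse the sha224_close helpers, asking for all
 * 8 state words instead of 7. This is deliberate, not a typo.
 */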

/* see sph_sha2.h */
void
sph_sha256_close(void *cc, void *dst)
{
    sha224_close(cc, dst, 8);
    sph_sha256_init(cc);
}

/* see sph_sha2.h */
void
sph_sha256_addbits_and_close(void *cc, unsigned ub, unsigned n, void *dst)
{
    sha224_addbits_and_close(cc, ub, n, dst, 8);
    sph_sha256_init(cc);
}

/* see sph_sha2.h */
void
sph_sha224_comp(const sph_u32 msg[16], sph_u32 val[8])
{
#define SHA2_IN(x)   msg[x]
    SHA2_ROUND_BODY(SHA2_IN, val);
#undef SHA2_IN
}
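
/*
 * A minimal self-test sketch (not part of the original file). It
 * assumes the streaming update function sph_sha256() declared in
 * sph_sha2.h and checks the well-known FIPS 180 test vector for
 * SHA-256("abc"). The SPH_SHA2_SELF_TEST guard is a hypothetical
 * name; compile with -DSPH_SHA2_SELF_TEST to enable it.
 */
#ifdef SPH_SHA2_SELF_TEST

#include <stdio.h>

int
main(void)
{
    /* Expected SHA-256("abc"), from the FIPS 180 test vectors. */
    static const unsigned char ref[32] = {
        0xBA, 0x78, 0x16, 0xBF, 0x8F, 0x01, 0xCF, 0xEA,
        0x41, 0x41, 0x40, 0xDE, 0x5D, 0xAE, 0x22, 0x23,
        0xB0, 0x03, 0x61, 0xA3, 0x96, 0x17, 0x7A, 0x9C,
        0xB4, 0x10, 0xFF, 0x61, 0xF2, 0x00, 0x15, 0xAD
    };
    sph_sha256_context cc;
    unsigned char out[32];

    sph_sha256_init(&cc);
    sph_sha256(&cc, "abc", 3);
    sph_sha256_close(&cc, out);
    if (memcmp(out, ref, sizeof ref) != 0) {
        fputs("SHA-256 self-test FAILED\n", stderr);
        return 1;
    }
    fputs("SHA-256 self-test passed\n", stdout);
    return 0;
}

#endif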