base64.c

/*
   +----------------------------------------------------------------------+
   | Copyright (c) The PHP Group                                          |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,     |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | https://www.php.net/license/3_01.txt                                 |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
   | Author: Jim Winstead <jimw@php.net>                                  |
   |         Xinchen Hui <laruence@php.net>                               |
   +----------------------------------------------------------------------+
 */

#include <string.h>

#include "php.h"
#include "base64.h"

/* {{{ base64 tables */
static const char base64_table[] = {
	'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M',
	'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
	'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
	'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
	'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/', '\0'
};

static const char base64_pad = '=';
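
/* Maps ASCII bytes back to 6-bit values: 0..63 for alphabet characters,
 * -1 for whitespace ('\t', '\n', '\r', ' '), -2 for any other byte.
 * The decoder skips -1 entries and treats -2 as an error in strict mode. */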
static const short base64_reverse_table[256] = {
	-2, -2, -2, -2, -2, -2, -2, -2, -2, -1, -1, -2, -2, -1, -2, -2,
	-2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2,
	-1, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, 62, -2, -2, -2, 63,
	52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -2, -2, -2, -2, -2, -2,
	-2,  0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11, 12, 13, 14,
	15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -2, -2, -2, -2, -2,
	-2, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
	41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -2, -2, -2, -2, -2,
	-2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2,
	-2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2,
	-2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2,
	-2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2,
	-2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2,
	-2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2,
	-2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2,
	-2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2
};
/* }}} */

#ifdef __aarch64__
#include <arm_neon.h>
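
/*
 * NEON fast path for AArch64 encoding: each iteration loads 48 input bytes
 * with a de-interleaving vld3q_u8, splits them into four vectors of 6-bit
 * values, maps those to ASCII, and stores 64 output characters with vst4q_u8.
 */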
static zend_always_inline uint8x16_t encode_toascii(const uint8x16_t input, const uint8x16x2_t shift_LUT)
{
	/* reduce  0..51 -> 0
	 *        52..61 -> 1 .. 10
	 *            62 -> 11
	 *            63 -> 12 */
	uint8x16_t result = vqsubq_u8(input, vdupq_n_u8(51));
	/* distinguish between ranges 0..25 and 26..51:
	 *  0 .. 25 -> becomes 13
	 * 26 .. 51 -> remains 0 */
	const uint8x16_t less = vcgtq_u8(vdupq_n_u8(26), input);
	result = vorrq_u8(result, vandq_u8(less, vdupq_n_u8(13)));
	/* read shift */
	result = vqtbl2q_u8(shift_LUT, result);
	return vaddq_u8(result, input);
}

static zend_always_inline unsigned char *neon_base64_encode(const unsigned char *in, size_t inl, unsigned char *out, size_t *left)
{
	const uint8_t shift_LUT_[32] = {'a' - 26, '0' - 52, '0' - 52, '0' - 52,
					'0' - 52, '0' - 52, '0' - 52, '0' - 52,
					'0' - 52, '0' - 52, '0' - 52, '+' - 62,
					'/' - 63, 'A', 0, 0,
					'a' - 26, '0' - 52, '0' - 52, '0' - 52,
					'0' - 52, '0' - 52, '0' - 52, '0' - 52,
					'0' - 52, '0' - 52, '0' - 52, '+' - 62,
					'/' - 63, 'A', 0, 0};
	const uint8x16x2_t shift_LUT = *((const uint8x16x2_t *)shift_LUT_);

	do {
		/* [ccdddddd | bbbbcccc | aaaaaabb]
		 *  x.val[2] | x.val[1] | x.val[0] */
		const uint8x16x3_t x = vld3q_u8((const uint8_t *)(in));

		/* [00aa_aaaa] */
		const uint8x16_t field_a = vshrq_n_u8(x.val[0], 2);

		const uint8x16_t field_b =            /* [00bb_bbbb] */
			vbslq_u8(vdupq_n_u8(0x30),        /* [0011_0000] */
				vshlq_n_u8(x.val[0], 4),      /* [aabb_0000] */
				vshrq_n_u8(x.val[1], 4));     /* [0000_bbbb] */

		const uint8x16_t field_c =            /* [00cc_cccc] */
			vbslq_u8(vdupq_n_u8(0x3c),        /* [0011_1100] */
				vshlq_n_u8(x.val[1], 2),      /* [bbcc_cc00] */
				vshrq_n_u8(x.val[2], 6));     /* [0000_00cc] */

		/* [00dd_dddd] */
		const uint8x16_t field_d = vandq_u8(x.val[2], vdupq_n_u8(0x3f));

		uint8x16x4_t result;
		result.val[0] = encode_toascii(field_a, shift_LUT);
		result.val[1] = encode_toascii(field_b, shift_LUT);
		result.val[2] = encode_toascii(field_c, shift_LUT);
		result.val[3] = encode_toascii(field_d, shift_LUT);

		vst4q_u8((uint8_t *)out, result);
		out += 64;
		in += 16 * 3;
		inl -= 16 * 3;
	} while (inl >= 16 * 3);

	*left = inl;
	return out;
}
#endif /* __aarch64__ */
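
/*
 * Scalar encoder: each 3-byte group becomes 4 characters; a 1- or 2-byte
 * tail is padded with '='. Example: "Man" (0x4d 0x61 0x6e) splits into the
 * sextets 19, 22, 5, 46 and encodes to "TWFu".
 */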
static zend_always_inline unsigned char *php_base64_encode_impl(const unsigned char *in, size_t inl, unsigned char *out) /* {{{ */
{
#ifdef __aarch64__
	if (inl >= 16 * 3) {
		size_t left = 0;
		out = neon_base64_encode(in, inl, out, &left);
		in += inl - left;
		inl = left;
	}
#endif

	while (inl > 2) { /* keep going until we have less than 24 bits */
		*out++ = base64_table[in[0] >> 2];
		*out++ = base64_table[((in[0] & 0x03) << 4) + (in[1] >> 4)];
		*out++ = base64_table[((in[1] & 0x0f) << 2) + (in[2] >> 6)];
		*out++ = base64_table[in[2] & 0x3f];

		in += 3;
		inl -= 3; /* we just handle 3 octets of data */
	}

	/* now deal with the tail end of things */
	if (inl != 0) {
		*out++ = base64_table[in[0] >> 2];
		if (inl > 1) {
			*out++ = base64_table[((in[0] & 0x03) << 4) + (in[1] >> 4)];
			*out++ = base64_table[(in[1] & 0x0f) << 2];
			*out++ = base64_pad;
		} else {
			*out++ = base64_table[(in[0] & 0x03) << 4];
			*out++ = base64_pad;
			*out++ = base64_pad;
		}
	}

	*out = '\0';

	return out;
}
/* }}} */

#ifdef __aarch64__
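/*
 * NEON fast path for decoding: each iteration consumes 64 input characters
 * (vld4q_u8) and produces 48 output bytes (vst3q_u8). decode_fromascii()
 * looks up a per-character shift from the high nibble and, via the mask and
 * bit-position tables, flags any byte that is not part of the base64
 * alphabet; on the first block containing such a byte the loop stops and
 * the remainder (including any '=' padding) goes to the scalar decoder.
 */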
static zend_always_inline uint8x16_t decode_fromascii(const uint8x16_t input, uint8x16_t *error, const uint8x16x2_t shiftLUT, const uint8x16x2_t maskLUT, const uint8x16x2_t bitposLUT) {
	const uint8x16_t higher_nibble = vshrq_n_u8(input, 4);
	const uint8x16_t lower_nibble = vandq_u8(input, vdupq_n_u8(0x0f));
	const uint8x16_t sh = vqtbl2q_u8(shiftLUT, higher_nibble);
	const uint8x16_t eq_2f = vceqq_u8(input, vdupq_n_u8(0x2f));
	const uint8x16_t shift = vbslq_u8(eq_2f, vdupq_n_u8(16), sh);
	const uint8x16_t M = vqtbl2q_u8(maskLUT, lower_nibble);
	const uint8x16_t bit = vqtbl2q_u8(bitposLUT, higher_nibble);
	*error = vceqq_u8(vandq_u8(M, bit), vdupq_n_u8(0));
	return vaddq_u8(input, shift);
}

static zend_always_inline size_t neon_base64_decode(const unsigned char *in, size_t inl, unsigned char *out, size_t *left) {
	unsigned char *out_orig = out;
	const uint8_t shiftLUT_[32] = {
		0, 0, 19, 4, (uint8_t)-65, (uint8_t)-65, (uint8_t)-71, (uint8_t)-71,
		0, 0, 0, 0, 0, 0, 0, 0,
		0, 0, 19, 4, (uint8_t)-65, (uint8_t)-65, (uint8_t)-71, (uint8_t)-71,
		0, 0, 0, 0, 0, 0, 0, 0};
	const uint8_t maskLUT_[32] = {
		/* 0        : 0b1010_1000*/ 0xa8,
		/* 1 .. 9   : 0b1111_1000*/ 0xf8, 0xf8, 0xf8, 0xf8, 0xf8, 0xf8, 0xf8, 0xf8, 0xf8,
		/* 10       : 0b1111_0000*/ 0xf0,
		/* 11       : 0b0101_0100*/ 0x54,
		/* 12 .. 14 : 0b0101_0000*/ 0x50, 0x50, 0x50,
		/* 15       : 0b0101_0100*/ 0x54,

		/* 0        : 0b1010_1000*/ 0xa8,
		/* 1 .. 9   : 0b1111_1000*/ 0xf8, 0xf8, 0xf8, 0xf8, 0xf8, 0xf8, 0xf8, 0xf8, 0xf8,
		/* 10       : 0b1111_0000*/ 0xf0,
		/* 11       : 0b0101_0100*/ 0x54,
		/* 12 .. 14 : 0b0101_0000*/ 0x50, 0x50, 0x50,
		/* 15       : 0b0101_0100*/ 0x54
	};
	const uint8_t bitposLUT_[32] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,

		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
	};
	const uint8x16x2_t shiftLUT = *((const uint8x16x2_t *)shiftLUT_);
	const uint8x16x2_t maskLUT = *((const uint8x16x2_t *)maskLUT_);
	const uint8x16x2_t bitposLUT = *((const uint8x16x2_t *)bitposLUT_);

	do {
		const uint8x16x4_t x = vld4q_u8((const unsigned char *)in);
		uint8x16_t error_a;
		uint8x16_t error_b;
		uint8x16_t error_c;
		uint8x16_t error_d;
		uint8x16_t field_a = decode_fromascii(x.val[0], &error_a, shiftLUT, maskLUT, bitposLUT);
		uint8x16_t field_b = decode_fromascii(x.val[1], &error_b, shiftLUT, maskLUT, bitposLUT);
		uint8x16_t field_c = decode_fromascii(x.val[2], &error_c, shiftLUT, maskLUT, bitposLUT);
		uint8x16_t field_d = decode_fromascii(x.val[3], &error_d, shiftLUT, maskLUT, bitposLUT);

		const uint8x16_t err = vorrq_u8(vorrq_u8(error_a, error_b), vorrq_u8(error_c, error_d));

		union {uint8_t mem[16]; uint64_t dw[2]; } error;
		vst1q_u8(error.mem, err);

		/* Check that the input only contains bytes belonging to the alphabet of
		 * Base64. If there are errors, decode the rest of the string with the
		 * scalar decoder. */
		if (error.dw[0] | error.dw[1])
			break;

		uint8x16x3_t result;
		result.val[0] = vorrq_u8(vshrq_n_u8(field_b, 4), vshlq_n_u8(field_a, 2));
		result.val[1] = vorrq_u8(vshrq_n_u8(field_c, 2), vshlq_n_u8(field_b, 4));
		result.val[2] = vorrq_u8(field_d, vshlq_n_u8(field_c, 6));

		vst3q_u8((unsigned char *)out, result);
		out += 16 * 3;
		in += 16 * 4;
		inl -= 16 * 4;
	} while (inl >= 16 * 4);

	*left = inl;
	return out - out_orig;
}
#endif /* __aarch64__ */
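
/*
 * Scalar decoder. In non-strict mode every byte that is not part of the
 * alphabet is silently skipped. In strict mode only whitespace is skipped;
 * any other invalid byte, data after a '=' pad, a final group of length 1,
 * or a malformed padding length makes the function return 0.
 */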
static zend_always_inline int php_base64_decode_impl(const unsigned char *in, size_t inl, unsigned char *out, size_t *outl, bool strict) /* {{{ */
{
	int ch;
	size_t i = 0, padding = 0, j = *outl;
#ifdef __aarch64__
	if (inl >= 16 * 4) {
		size_t left = 0;
		j += neon_base64_decode(in, inl, out, &left);
		i = inl - left;
		in += i;
		inl = left;
	}
#endif

	/* run through the whole string, converting as we go */
	while (inl-- > 0) {
		ch = *in++;
		if (ch == base64_pad) {
			padding++;
			continue;
		}

		ch = base64_reverse_table[ch];
		if (!strict) {
			/* skip unknown characters and whitespace */
			if (ch < 0) {
				continue;
			}
		} else {
			/* skip whitespace */
			if (ch == -1) {
				continue;
			}
			/* fail on bad characters or if any data follows padding */
			if (ch == -2 || padding) {
				goto fail;
			}
		}

		switch (i % 4) {
			case 0:
				out[j] = ch << 2;
				break;
			case 1:
				out[j++] |= ch >> 4;
				out[j] = (ch & 0x0f) << 4;
				break;
			case 2:
				out[j++] |= ch >> 2;
				out[j] = (ch & 0x03) << 6;
				break;
			case 3:
				out[j++] |= ch;
				break;
		}
		i++;
	}

	/* fail if the input is truncated (only one char in last group) */
	if (strict && i % 4 == 1) {
		goto fail;
	}

	/* fail if the padding length is wrong (not VV==, VVV=), but accept zero padding
	 * RFC 4648: "In some circumstances, the use of padding [--] is not required" */
	if (strict && padding && (padding > 2 || (i + padding) % 4 != 0)) {
		goto fail;
	}

	*outl = j;
	out[j] = '\0';

	return 1;

fail:
	return 0;
}
/* }}} */

/* {{{ php_base64_encode */
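/*
 * SIMD dispatch: when AVX2 or SSSE3 is a compile-time baseline the
 * accelerated functions are compiled in directly; otherwise a resolver picks
 * the best variant, either at load time through ifunc or at module init
 * through function pointers, falling back to php_base64_encode_default()
 * and php_base64_decode_ex_default().
 */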
#if ZEND_INTRIN_AVX2_NATIVE
# undef ZEND_INTRIN_SSSE3_NATIVE
# undef ZEND_INTRIN_SSSE3_RESOLVER
# undef ZEND_INTRIN_SSSE3_FUNC_PROTO
# undef ZEND_INTRIN_SSSE3_FUNC_PTR
#elif ZEND_INTRIN_AVX2_FUNC_PROTO && ZEND_INTRIN_SSSE3_NATIVE
# undef ZEND_INTRIN_SSSE3_NATIVE
# undef ZEND_INTRIN_SSSE3_RESOLVER
# define ZEND_INTRIN_SSSE3_RESOLVER 1
# define ZEND_INTRIN_SSSE3_FUNC_PROTO 1
# undef ZEND_INTRIN_SSSE3_FUNC_DECL
# ifdef HAVE_FUNC_ATTRIBUTE_TARGET
#  define ZEND_INTRIN_SSSE3_FUNC_DECL(func) ZEND_API func __attribute__((target("ssse3")))
# else
#  define ZEND_INTRIN_SSSE3_FUNC_DECL(func) ZEND_API func
# endif
#elif ZEND_INTRIN_AVX2_FUNC_PTR && ZEND_INTRIN_SSSE3_NATIVE
# undef ZEND_INTRIN_SSSE3_NATIVE
# undef ZEND_INTRIN_SSSE3_RESOLVER
# define ZEND_INTRIN_SSSE3_RESOLVER 1
# define ZEND_INTRIN_SSSE3_FUNC_PTR 1
# undef ZEND_INTRIN_SSSE3_FUNC_DECL
# ifdef HAVE_FUNC_ATTRIBUTE_TARGET
#  define ZEND_INTRIN_SSSE3_FUNC_DECL(func) ZEND_API func __attribute__((target("ssse3")))
# else
#  define ZEND_INTRIN_SSSE3_FUNC_DECL(func) ZEND_API func
# endif
#endif

#if ZEND_INTRIN_AVX2_NATIVE
# include <immintrin.h>
#elif ZEND_INTRIN_SSSE3_NATIVE
# include <tmmintrin.h>
#elif (ZEND_INTRIN_SSSE3_RESOLVER || ZEND_INTRIN_AVX2_RESOLVER)
# if ZEND_INTRIN_AVX2_RESOLVER
#  include <immintrin.h>
# else
#  include <tmmintrin.h>
# endif /* (ZEND_INTRIN_SSSE3_RESOLVER || ZEND_INTRIN_AVX2_RESOLVER) */
# include "Zend/zend_cpuinfo.h"

# if ZEND_INTRIN_AVX2_RESOLVER
ZEND_INTRIN_AVX2_FUNC_DECL(zend_string *php_base64_encode_avx2(const unsigned char *str, size_t length));
ZEND_INTRIN_AVX2_FUNC_DECL(zend_string *php_base64_decode_ex_avx2(const unsigned char *str, size_t length, bool strict));
# endif

# if ZEND_INTRIN_SSSE3_RESOLVER
ZEND_INTRIN_SSSE3_FUNC_DECL(zend_string *php_base64_encode_ssse3(const unsigned char *str, size_t length));
ZEND_INTRIN_SSSE3_FUNC_DECL(zend_string *php_base64_decode_ex_ssse3(const unsigned char *str, size_t length, bool strict));
# endif

zend_string *php_base64_encode_default(const unsigned char *str, size_t length);
zend_string *php_base64_decode_ex_default(const unsigned char *str, size_t length, bool strict);

# if (ZEND_INTRIN_AVX2_FUNC_PROTO || ZEND_INTRIN_SSSE3_FUNC_PROTO)
PHPAPI zend_string *php_base64_encode(const unsigned char *str, size_t length) __attribute__((ifunc("resolve_base64_encode")));
PHPAPI zend_string *php_base64_decode_ex(const unsigned char *str, size_t length, bool strict) __attribute__((ifunc("resolve_base64_decode")));

typedef zend_string *(*base64_encode_func_t)(const unsigned char *, size_t);
typedef zend_string *(*base64_decode_func_t)(const unsigned char *, size_t, bool);

ZEND_NO_SANITIZE_ADDRESS
ZEND_ATTRIBUTE_UNUSED /* clang mistakenly warns about this */
static base64_encode_func_t resolve_base64_encode(void) {
# if ZEND_INTRIN_AVX2_FUNC_PROTO
	if (zend_cpu_supports_avx2()) {
		return php_base64_encode_avx2;
	} else
# endif
#if ZEND_INTRIN_SSSE3_FUNC_PROTO
	if (zend_cpu_supports_ssse3()) {
		return php_base64_encode_ssse3;
	}
#endif
	return php_base64_encode_default;
}

ZEND_NO_SANITIZE_ADDRESS
ZEND_ATTRIBUTE_UNUSED /* clang mistakenly warns about this */
static base64_decode_func_t resolve_base64_decode(void) {
# if ZEND_INTRIN_AVX2_FUNC_PROTO
	if (zend_cpu_supports_avx2()) {
		return php_base64_decode_ex_avx2;
	} else
# endif
#if ZEND_INTRIN_SSSE3_FUNC_PROTO
	if (zend_cpu_supports_ssse3()) {
		return php_base64_decode_ex_ssse3;
	}
#endif
	return php_base64_decode_ex_default;
}
# else /* (ZEND_INTRIN_AVX2_FUNC_PROTO || ZEND_INTRIN_SSSE3_FUNC_PROTO) */

PHPAPI zend_string *(*php_base64_encode_ptr)(const unsigned char *str, size_t length) = NULL;
PHPAPI zend_string *(*php_base64_decode_ex_ptr)(const unsigned char *str, size_t length, bool strict) = NULL;

PHPAPI zend_string *php_base64_encode(const unsigned char *str, size_t length) {
	return php_base64_encode_ptr(str, length);
}
PHPAPI zend_string *php_base64_decode_ex(const unsigned char *str, size_t length, bool strict) {
	return php_base64_decode_ex_ptr(str, length, strict);
}

PHP_MINIT_FUNCTION(base64_intrin)
{
# if ZEND_INTRIN_AVX2_FUNC_PTR
	if (zend_cpu_supports_avx2()) {
		php_base64_encode_ptr = php_base64_encode_avx2;
		php_base64_decode_ex_ptr = php_base64_decode_ex_avx2;
	} else
# endif
#if ZEND_INTRIN_SSSE3_FUNC_PTR
	if (zend_cpu_supports_ssse3()) {
		php_base64_encode_ptr = php_base64_encode_ssse3;
		php_base64_decode_ex_ptr = php_base64_decode_ex_ssse3;
	} else
#endif
	{
		php_base64_encode_ptr = php_base64_encode_default;
		php_base64_decode_ex_ptr = php_base64_decode_ex_default;
	}
	return SUCCESS;
}
# endif /* (ZEND_INTRIN_AVX2_FUNC_PROTO || ZEND_INTRIN_SSSE3_FUNC_PROTO) */
#endif /* ZEND_INTRIN_AVX2_NATIVE */
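
/*
 * SIMD encoding: the *_reshuffle helpers spread each group of three input
 * bytes across four byte lanes holding 6-bit values, and the *_translate
 * helpers add a per-range offset (looked up with a byte shuffle) to turn
 * those values into base64 ASCII characters.
 */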
#if ZEND_INTRIN_AVX2_NATIVE || ZEND_INTRIN_AVX2_RESOLVER
# if ZEND_INTRIN_AVX2_RESOLVER && defined(HAVE_FUNC_ATTRIBUTE_TARGET)
static __m256i php_base64_encode_avx2_reshuffle(__m256i in) __attribute__((target("avx2")));
static __m256i php_base64_encode_avx2_translate(__m256i in) __attribute__((target("avx2")));
# endif

static __m256i php_base64_encode_avx2_reshuffle(__m256i in)
{
	/* This one works with shifted (4 bytes) input in order to
	 * be able to work efficiently in the 2 128-bit lanes */
	__m256i t0, t1, t2, t3;

	/* input, bytes MSB to LSB:
	 * 0 0 0 0 x w v u t s r q p o n m
	 * l k j i h g f e d c b a 0 0 0 0 */
	in = _mm256_shuffle_epi8(in, _mm256_set_epi8(
			10, 11,  9, 10,
			 7,  8,  6,  7,
			 4,  5,  3,  4,
			 1,  2,  0,  1,

			14, 15, 13, 14,
			11, 12, 10, 11,
			 8,  9,  7,  8,
			 5,  6,  4,  5));

	t0 = _mm256_and_si256(in, _mm256_set1_epi32(0x0fc0fc00));
	t1 = _mm256_mulhi_epu16(t0, _mm256_set1_epi32(0x04000040));
	t2 = _mm256_and_si256(in, _mm256_set1_epi32(0x003f03f0));
	t3 = _mm256_mullo_epi16(t2, _mm256_set1_epi32(0x01000010));

	return _mm256_or_si256(t1, t3);
	/* 00xxxxxx 00wwwwXX 00vvWWWW 00VVVVVV
	 * 00uuuuuu 00ttttUU 00ssTTTT 00SSSSSS
	 * 00rrrrrr 00qqqqRR 00ppQQQQ 00PPPPPP
	 * 00oooooo 00nnnnOO 00mmNNNN 00MMMMMM
	 * 00llllll 00kkkkLL 00jjKKKK 00JJJJJJ
	 * 00iiiiii 00hhhhII 00ggHHHH 00GGGGGG
	 * 00ffffff 00eeeeFF 00ddEEEE 00DDDDDD
	 * 00cccccc 00bbbbCC 00aaBBBB 00AAAAAA */
}

static __m256i php_base64_encode_avx2_translate(__m256i in)
{
	__m256i lut, indices, mask;

	lut = _mm256_setr_epi8(
			65, 71, -4, -4, -4, -4, -4, -4,
			-4, -4, -4, -4, -19, -16, 0, 0,
			65, 71, -4, -4, -4, -4, -4, -4,
			-4, -4, -4, -4, -19, -16, 0, 0);

	indices = _mm256_subs_epu8(in, _mm256_set1_epi8(51));
	mask = _mm256_cmpgt_epi8(in, _mm256_set1_epi8(25));
	indices = _mm256_sub_epi8(indices, mask);

	return _mm256_add_epi8(in, _mm256_shuffle_epi8(lut, indices));
}
#endif /* ZEND_INTRIN_AVX2_NATIVE || (ZEND_INTRIN_AVX2_RESOLVER && !ZEND_INTRIN_SSSE3_NATIVE) */

#if ZEND_INTRIN_SSSE3_NATIVE || ZEND_INTRIN_SSSE3_RESOLVER
# if ZEND_INTRIN_SSSE3_RESOLVER && defined(HAVE_FUNC_ATTRIBUTE_TARGET)
static __m128i php_base64_encode_ssse3_reshuffle(__m128i in) __attribute__((target("ssse3")));
static __m128i php_base64_encode_ssse3_translate(__m128i in) __attribute__((target("ssse3")));
# endif

static __m128i php_base64_encode_ssse3_reshuffle(__m128i in)
{
	__m128i t0, t1, t2, t3;

	/* input, bytes MSB to LSB:
	 * 0 0 0 0 l k j i h g f e d c b a */
	in = _mm_shuffle_epi8(in, _mm_set_epi8(
			10, 11,  9, 10,
			 7,  8,  6,  7,
			 4,  5,  3,  4,
			 1,  2,  0,  1));

	t0 = _mm_and_si128(in, _mm_set1_epi32(0x0fc0fc00));
	t1 = _mm_mulhi_epu16(t0, _mm_set1_epi32(0x04000040));
	t2 = _mm_and_si128(in, _mm_set1_epi32(0x003f03f0));
	t3 = _mm_mullo_epi16(t2, _mm_set1_epi32(0x01000010));

	/* output (upper case are MSB, lower case are LSB):
	 * 00llllll 00kkkkLL 00jjKKKK 00JJJJJJ
	 * 00iiiiii 00hhhhII 00ggHHHH 00GGGGGG
	 * 00ffffff 00eeeeFF 00ddEEEE 00DDDDDD
	 * 00cccccc 00bbbbCC 00aaBBBB 00AAAAAA */
	return _mm_or_si128(t1, t3);
}

static __m128i php_base64_encode_ssse3_translate(__m128i in)
{
	__m128i mask, indices;
	__m128i lut = _mm_setr_epi8(
			 65,  71,  -4,  -4,
			 -4,  -4,  -4,  -4,
			 -4,  -4,  -4,  -4,
			-19, -16,   0,   0
	);

	/* Translate values 0..63 to the Base64 alphabet. There are five sets:
	 * #  From      To         Abs     Index   Characters
	 * 0  [0..25]   [65..90]   +65     0       ABCDEFGHIJKLMNOPQRSTUVWXYZ
	 * 1  [26..51]  [97..122]  +71     1       abcdefghijklmnopqrstuvwxyz
	 * 2  [52..61]  [48..57]    -4     [2..11] 0123456789
	 * 3  [62]      [43]       -19     12      +
	 * 4  [63]      [47]       -16     13      / */

	/* Create LUT indices from input:
	 * the index for range #0 is right, others are 1 less than expected: */
	indices = _mm_subs_epu8(in, _mm_set1_epi8(51));

	/* mask is 0xFF (-1) for range #[1..4] and 0x00 for range #0: */
	mask = _mm_cmpgt_epi8(in, _mm_set1_epi8(25));

	/* subtract -1, so add 1 to indices for range #[1..4], All indices are now correct: */
	indices = _mm_sub_epi8(indices, mask);

	/* Add offsets to input values: */
	return _mm_add_epi8(in, _mm_shuffle_epi8(lut, indices));
}
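
/* Encode 12 input bytes to 16 output characters per iteration; each 16-byte
 * load only advances the source by 12, so the loop keeps going while at
 * least 16 bytes remain (length > 15) to stay within bounds. */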
#define PHP_BASE64_ENCODE_SSSE3_LOOP \
	while (length > 15) { \
		__m128i s = _mm_loadu_si128((__m128i *)c); \
		\
		s = php_base64_encode_ssse3_reshuffle(s); \
		\
		s = php_base64_encode_ssse3_translate(s); \
		\
		_mm_storeu_si128((__m128i *)o, s); \
		c += 12; \
		o += 16; \
		length -= 12; \
	}

#endif /* ZEND_INTRIN_SSSE3_NATIVE || (ZEND_INTRIN_SSSE3_RESOLVER && !ZEND_INTRIN_AVX2_NATIVE) */

#if ZEND_INTRIN_AVX2_NATIVE || ZEND_INTRIN_AVX2_RESOLVER || ZEND_INTRIN_SSSE3_NATIVE || ZEND_INTRIN_SSSE3_RESOLVER
# if ZEND_INTRIN_AVX2_NATIVE || ZEND_INTRIN_SSSE3_NATIVE
PHPAPI zend_string *php_base64_encode(const unsigned char *str, size_t length)
# elif ZEND_INTRIN_AVX2_RESOLVER
zend_string *php_base64_encode_avx2(const unsigned char *str, size_t length)
# else /* ZEND_INTRIN_SSSE3_RESOLVER */
zend_string *php_base64_encode_ssse3(const unsigned char *str, size_t length)
# endif
{
	const unsigned char *c = str;
	unsigned char *o;
	zend_string *result;

	result = zend_string_safe_alloc(((length + 2) / 3), 4 * sizeof(char), 0, 0);
	o = (unsigned char *)ZSTR_VAL(result);

# if ZEND_INTRIN_AVX2_NATIVE || ZEND_INTRIN_AVX2_RESOLVER
	if (length > 31) {
		__m256i s = _mm256_loadu_si256((__m256i *)c);

		s = _mm256_permutevar8x32_epi32(s, _mm256_setr_epi32(0, 0, 1, 2, 3, 4, 5, 6));

		for (;;) {
			s = php_base64_encode_avx2_reshuffle(s);

			s = php_base64_encode_avx2_translate(s);

			_mm256_storeu_si256((__m256i *)o, s);
			c += 24;
			o += 32;
			length -= 24;

			if (length < 28) {
				break;
			}

			s = _mm256_loadu_si256((__m256i *)(c - 4));
		}
	}
# else
	PHP_BASE64_ENCODE_SSSE3_LOOP;
# endif

	o = php_base64_encode_impl(c, length, o);

	ZSTR_LEN(result) = (o - (unsigned char *)ZSTR_VAL(result));

	return result;
}

# if ZEND_INTRIN_SSSE3_RESOLVER && ZEND_INTRIN_AVX2_RESOLVER
zend_string *php_base64_encode_ssse3(const unsigned char *str, size_t length)
{
	const unsigned char *c = str;
	unsigned char *o;
	zend_string *result;

	result = zend_string_safe_alloc(((length + 2) / 3), 4 * sizeof(char), 0, 0);
	o = (unsigned char *)ZSTR_VAL(result);

	PHP_BASE64_ENCODE_SSSE3_LOOP;

	o = php_base64_encode_impl(c, length, o);

	ZSTR_LEN(result) = (o - (unsigned char *)ZSTR_VAL(result));

	return result;
}
# endif
#endif /* ZEND_INTRIN_AVX2_NATIVE || ZEND_INTRIN_AVX2_RESOLVER || ZEND_INTRIN_SSSE3_NATIVE || ZEND_INTRIN_SSSE3_RESOLVER */
/* }}} */

#if ZEND_INTRIN_AVX2_NATIVE || ZEND_INTRIN_AVX2_RESOLVER
# if ZEND_INTRIN_AVX2_RESOLVER && defined(HAVE_FUNC_ATTRIBUTE_TARGET)
static __m256i php_base64_decode_avx2_reshuffle(__m256i in) __attribute__((target("avx2")));
# endif

static __m256i php_base64_decode_avx2_reshuffle(__m256i in)
{
	__m256i merge_ab_and_bc, out;

	merge_ab_and_bc = _mm256_maddubs_epi16(in, _mm256_set1_epi32(0x01400140));
	out = _mm256_madd_epi16(merge_ab_and_bc, _mm256_set1_epi32(0x00011000));
	out = _mm256_shuffle_epi8(out, _mm256_setr_epi8(
			 2,  1,  0,  6,  5,  4, 10,  9,  8, 14, 13, 12, -1, -1, -1, -1,
			 2,  1,  0,  6,  5,  4, 10,  9,  8, 14, 13, 12, -1, -1, -1, -1));

	return _mm256_permutevar8x32_epi32(out, _mm256_setr_epi32(0, 1, 2, 4, 5, 6, -1, -1));
}
#endif

#if ZEND_INTRIN_SSSE3_NATIVE || ZEND_INTRIN_SSSE3_RESOLVER
# if ZEND_INTRIN_SSSE3_RESOLVER && defined(HAVE_FUNC_ATTRIBUTE_TARGET)
static __m128i php_base64_decode_ssse3_reshuffle(__m128i in) __attribute__((target("ssse3")));
# endif

static __m128i php_base64_decode_ssse3_reshuffle(__m128i in)
{
	__m128i merge_ab_and_bc, out;

	merge_ab_and_bc = _mm_maddubs_epi16(in, _mm_set1_epi32(0x01400140));
	/* 0000kkkk LLllllll 0000JJJJ JJjjKKKK
	 * 0000hhhh IIiiiiii 0000GGGG GGggHHHH
	 * 0000eeee FFffffff 0000DDDD DDddEEEE
	 * 0000bbbb CCcccccc 0000AAAA AAaaBBBB */

	out = _mm_madd_epi16(merge_ab_and_bc, _mm_set1_epi32(0x00011000));
	/* 00000000 JJJJJJjj KKKKkkkk LLllllll
	 * 00000000 GGGGGGgg HHHHhhhh IIiiiiii
	 * 00000000 DDDDDDdd EEEEeeee FFffffff
	 * 00000000 AAAAAAaa BBBBbbbb CCcccccc */

	return _mm_shuffle_epi8(out, _mm_setr_epi8(
			 2,  1,  0,
			 6,  5,  4,
			10,  9,  8,
			14, 13, 12,
			-1, -1, -1, -1));
	/* 00000000 00000000 00000000 00000000
	 * LLllllll KKKKkkkk JJJJJJjj IIiiiiii
	 * HHHHhhhh GGGGGGgg FFffffff EEEEeeee
	 * DDDDDDdd CCcccccc BBBBbbbb AAAAAAaa */
}

#define PHP_BASE64_DECODE_SSSE3_LOOP \
	while (length > 15 + 6 + 2) { \
		__m128i lut_lo, lut_hi, lut_roll; \
		__m128i hi_nibbles, lo_nibbles, hi, lo; \
		__m128i s = _mm_loadu_si128((__m128i *)c); \
		\
		lut_lo = _mm_setr_epi8( \
			0x15, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, \
			0x11, 0x11, 0x13, 0x1A, 0x1B, 0x1B, 0x1B, 0x1A); \
		lut_hi = _mm_setr_epi8( \
			0x10, 0x10, 0x01, 0x02, 0x04, 0x08, 0x04, 0x08, \
			0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10); \
		lut_roll = _mm_setr_epi8( \
			0, 16, 19, 4, -65, -65, -71, -71, \
			0,  0,  0, 0,   0,   0,   0,   0); \
		\
		hi_nibbles = _mm_and_si128( \
			_mm_srli_epi32(s, 4), _mm_set1_epi8(0x2f)); \
		lo_nibbles = _mm_and_si128(s, _mm_set1_epi8(0x2f)); \
		hi = _mm_shuffle_epi8(lut_hi, hi_nibbles); \
		lo = _mm_shuffle_epi8(lut_lo, lo_nibbles); \
		\
		if (UNEXPECTED( \
			_mm_movemask_epi8( \
				_mm_cmpgt_epi8( \
					_mm_and_si128(lo, hi), _mm_set1_epi8(0))))) { \
			break; \
		} else { \
			__m128i eq_2f, roll; \
			\
			eq_2f = _mm_cmpeq_epi8(s, _mm_set1_epi8(0x2f)); \
			roll = _mm_shuffle_epi8( \
				lut_roll, _mm_add_epi8(eq_2f, hi_nibbles)); \
			\
			s = _mm_add_epi8(s, roll); \
			s = php_base64_decode_ssse3_reshuffle(s); \
			\
			_mm_storeu_si128((__m128i *)o, s); \
			\
			c += 16; \
			o += 12; \
			outl += 12; \
			length -= 16; \
		} \
	}
#endif

#if ZEND_INTRIN_AVX2_NATIVE || ZEND_INTRIN_AVX2_RESOLVER || ZEND_INTRIN_SSSE3_NATIVE || ZEND_INTRIN_SSSE3_RESOLVER
# if ZEND_INTRIN_AVX2_NATIVE || ZEND_INTRIN_SSSE3_NATIVE
PHPAPI zend_string *php_base64_decode_ex(const unsigned char *str, size_t length, bool strict)
# elif ZEND_INTRIN_AVX2_RESOLVER
zend_string *php_base64_decode_ex_avx2(const unsigned char *str, size_t length, bool strict)
# else
zend_string *php_base64_decode_ex_ssse3(const unsigned char *str, size_t length, bool strict)
# endif
{
	const unsigned char *c = str;
	unsigned char *o;
	size_t outl = 0;
	zend_string *result;

	result = zend_string_alloc(length, 0);
	o = (unsigned char *)ZSTR_VAL(result);

	/* See: "Faster Base64 Encoding and Decoding using AVX2 Instructions"
	 * https://arxiv.org/pdf/1704.00605.pdf */
# if ZEND_INTRIN_AVX2_NATIVE || ZEND_INTRIN_AVX2_RESOLVER
	while (length > 31 + 11 + 2) {
		__m256i lut_lo, lut_hi, lut_roll;
		__m256i hi_nibbles, lo_nibbles, hi, lo;
		__m256i str = _mm256_loadu_si256((__m256i *)c);

		lut_lo = _mm256_setr_epi8(
				0x15, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11,
				0x11, 0x11, 0x13, 0x1A, 0x1B, 0x1B, 0x1B, 0x1A,
				0x15, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11,
				0x11, 0x11, 0x13, 0x1A, 0x1B, 0x1B, 0x1B, 0x1A);
		lut_hi = _mm256_setr_epi8(
				0x10, 0x10, 0x01, 0x02, 0x04, 0x08, 0x04, 0x08,
				0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10,
				0x10, 0x10, 0x01, 0x02, 0x04, 0x08, 0x04, 0x08,
				0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10);
		lut_roll = _mm256_setr_epi8(
				0, 16, 19, 4, -65, -65, -71, -71,
				0,  0,  0, 0,   0,   0,   0,   0,
				0, 16, 19, 4, -65, -65, -71, -71,
				0,  0,  0, 0,   0,   0,   0,   0);

		hi_nibbles = _mm256_and_si256(_mm256_srli_epi32(str, 4), _mm256_set1_epi8(0x2f));
		lo_nibbles = _mm256_and_si256(str, _mm256_set1_epi8(0x2f));
		hi = _mm256_shuffle_epi8(lut_hi, hi_nibbles);
		lo = _mm256_shuffle_epi8(lut_lo, lo_nibbles);

		if (!_mm256_testz_si256(lo, hi)) {
			break;
		} else {
			__m256i eq_2f, roll;

			eq_2f = _mm256_cmpeq_epi8(str, _mm256_set1_epi8(0x2f));
			roll = _mm256_shuffle_epi8(lut_roll, _mm256_add_epi8(eq_2f, hi_nibbles));

			str = _mm256_add_epi8(str, roll);
			str = php_base64_decode_avx2_reshuffle(str);

			_mm256_storeu_si256((__m256i *)o, str);

			c += 32;
			o += 24;
			outl += 24;
			length -= 32;
		}
	}
# else
	PHP_BASE64_DECODE_SSSE3_LOOP;
# endif

	if (!php_base64_decode_impl(c, length, (unsigned char*)ZSTR_VAL(result), &outl, strict)) {
		zend_string_efree(result);
		return NULL;
	}

	ZSTR_LEN(result) = outl;

	return result;
}

# if ZEND_INTRIN_SSSE3_RESOLVER && ZEND_INTRIN_AVX2_RESOLVER
zend_string *php_base64_decode_ex_ssse3(const unsigned char *str, size_t length, bool strict)
{
	const unsigned char *c = str;
	unsigned char *o;
	size_t outl = 0;
	zend_string *result;

	result = zend_string_alloc(length, 0);
	o = (unsigned char *)ZSTR_VAL(result);

	PHP_BASE64_DECODE_SSSE3_LOOP;

	if (!php_base64_decode_impl(c, length, (unsigned char*)ZSTR_VAL(result), &outl, strict)) {
		zend_string_efree(result);
		return NULL;
	}

	ZSTR_LEN(result) = outl;

	return result;
}
# endif
#endif /* ZEND_INTRIN_AVX2_NATIVE || ZEND_INTRIN_AVX2_RESOLVER || ZEND_INTRIN_SSSE3_NATIVE || ZEND_INTRIN_SSSE3_RESOLVER */

#if !ZEND_INTRIN_AVX2_NATIVE && !ZEND_INTRIN_SSSE3_NATIVE
#if ZEND_INTRIN_AVX2_RESOLVER || ZEND_INTRIN_SSSE3_RESOLVER
zend_string *php_base64_encode_default(const unsigned char *str, size_t length)
#else
PHPAPI zend_string *php_base64_encode(const unsigned char *str, size_t length)
#endif
{
	unsigned char *p;
	zend_string *result;

	result = zend_string_safe_alloc(((length + 2) / 3), 4 * sizeof(char), 0, 0);
	p = (unsigned char *)ZSTR_VAL(result);

	p = php_base64_encode_impl(str, length, p);

	ZSTR_LEN(result) = (p - (unsigned char *)ZSTR_VAL(result));

	return result;
}
#endif

#if !ZEND_INTRIN_AVX2_NATIVE && !ZEND_INTRIN_SSSE3_NATIVE
#if ZEND_INTRIN_AVX2_RESOLVER || ZEND_INTRIN_SSSE3_RESOLVER
zend_string *php_base64_decode_ex_default(const unsigned char *str, size_t length, bool strict)
#else
PHPAPI zend_string *php_base64_decode_ex(const unsigned char *str, size_t length, bool strict)
#endif
{
	zend_string *result;
	size_t outl = 0;

	result = zend_string_alloc(length, 0);

	if (!php_base64_decode_impl(str, length, (unsigned char*)ZSTR_VAL(result), &outl, strict)) {
		zend_string_efree(result);
		return NULL;
	}

	ZSTR_LEN(result) = outl;

	return result;
}
#endif
/* }}} */

/* {{{ Encodes string using MIME base64 algorithm */
PHP_FUNCTION(base64_encode)
{
	char *str;
	size_t str_len;
	zend_string *result;

	ZEND_PARSE_PARAMETERS_START(1, 1)
		Z_PARAM_STRING(str, str_len)
	ZEND_PARSE_PARAMETERS_END();

	result = php_base64_encode((unsigned char*)str, str_len);
	RETURN_STR(result);
}
/* }}} */

/* {{{ Decodes string using MIME base64 algorithm */
PHP_FUNCTION(base64_decode)
{
	char *str;
	bool strict = 0;
	size_t str_len;
	zend_string *result;

	ZEND_PARSE_PARAMETERS_START(1, 2)
		Z_PARAM_STRING(str, str_len)
		Z_PARAM_OPTIONAL
		Z_PARAM_BOOL(strict)
	ZEND_PARSE_PARAMETERS_END();

	result = php_base64_decode_ex((unsigned char*)str, str_len, strict);
	if (result != NULL) {
		RETURN_STR(result);
	} else {
		RETURN_FALSE;
	}
}
/* }}} */