sre_compile.py

#
# Secret Labs' Regular Expression Engine
#
# convert template to internal format
#
# Copyright (c) 1997-2001 by Secret Labs AB.  All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#

"""Internal support module for sre"""

import _sre
import sre_parse
from sre_constants import *

assert _sre.MAGIC == MAGIC, "SRE module mismatch"

_LITERAL_CODES = {LITERAL, NOT_LITERAL}
_REPEATING_CODES = {REPEAT, MIN_REPEAT, MAX_REPEAT}
_SUCCESS_CODES = {SUCCESS, FAILURE}
_ASSERT_CODES = {ASSERT, ASSERT_NOT}

# Sets of lowercase characters which have the same uppercase.
_equivalences = (
    # LATIN SMALL LETTER I, LATIN SMALL LETTER DOTLESS I
    (0x69, 0x131), # iı
    # LATIN SMALL LETTER S, LATIN SMALL LETTER LONG S
    (0x73, 0x17f), # sſ
    # MICRO SIGN, GREEK SMALL LETTER MU
    (0xb5, 0x3bc), # µμ
    # COMBINING GREEK YPOGEGRAMMENI, GREEK SMALL LETTER IOTA, GREEK PROSGEGRAMMENI
    (0x345, 0x3b9, 0x1fbe), # \u0345ιι
    # GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS, GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA
    (0x390, 0x1fd3), # ΐΐ
    # GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS, GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND OXIA
    (0x3b0, 0x1fe3), # ΰΰ
    # GREEK SMALL LETTER BETA, GREEK BETA SYMBOL
    (0x3b2, 0x3d0), # βϐ
    # GREEK SMALL LETTER EPSILON, GREEK LUNATE EPSILON SYMBOL
    (0x3b5, 0x3f5), # εϵ
    # GREEK SMALL LETTER THETA, GREEK THETA SYMBOL
    (0x3b8, 0x3d1), # θϑ
    # GREEK SMALL LETTER KAPPA, GREEK KAPPA SYMBOL
    (0x3ba, 0x3f0), # κϰ
    # GREEK SMALL LETTER PI, GREEK PI SYMBOL
    (0x3c0, 0x3d6), # πϖ
    # GREEK SMALL LETTER RHO, GREEK RHO SYMBOL
    (0x3c1, 0x3f1), # ρϱ
    # GREEK SMALL LETTER FINAL SIGMA, GREEK SMALL LETTER SIGMA
    (0x3c2, 0x3c3), # ςσ
    # GREEK SMALL LETTER PHI, GREEK PHI SYMBOL
    (0x3c6, 0x3d5), # φϕ
    # LATIN SMALL LETTER S WITH DOT ABOVE, LATIN SMALL LETTER LONG S WITH DOT ABOVE
    (0x1e61, 0x1e9b), # ṡẛ
    # LATIN SMALL LIGATURE LONG S T, LATIN SMALL LIGATURE ST
    (0xfb05, 0xfb06), # ſtst
)

# Maps the lowercase code to lowercase codes which have the same uppercase.
_ignorecase_fixes = {i: tuple(j for j in t if i != j)
                     for t in _equivalences for i in t}
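
# Illustrative example (values derived from the table above): LATIN SMALL
# LETTER S (0x73) and LATIN SMALL LETTER LONG S (0x17f) upper-case to the
# same character, so each maps to the other:
#
#     _ignorecase_fixes[0x73]  == (0x17f,)
#     _ignorecase_fixes[0x17f] == (0x73,)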

def _compile(code, pattern, flags):
    # internal: compile a (sub)pattern
    emit = code.append
    _len = len
    LITERAL_CODES = _LITERAL_CODES
    REPEATING_CODES = _REPEATING_CODES
    SUCCESS_CODES = _SUCCESS_CODES
    ASSERT_CODES = _ASSERT_CODES
    if (flags & SRE_FLAG_IGNORECASE and
            not (flags & SRE_FLAG_LOCALE) and
            flags & SRE_FLAG_UNICODE):
        fixes = _ignorecase_fixes
    else:
        fixes = None
    for op, av in pattern:
        if op in LITERAL_CODES:
            if flags & SRE_FLAG_IGNORECASE:
                lo = _sre.getlower(av, flags)
                if fixes and lo in fixes:
                    emit(IN_IGNORE)
                    skip = _len(code); emit(0)
                    if op is NOT_LITERAL:
                        emit(NEGATE)
                    for k in (lo,) + fixes[lo]:
                        emit(LITERAL)
                        emit(k)
                    emit(FAILURE)
                    code[skip] = _len(code) - skip
                else:
                    emit(OP_IGNORE[op])
                    emit(lo)
            else:
                emit(op)
                emit(av)
        elif op is IN:
            if flags & SRE_FLAG_IGNORECASE:
                emit(OP_IGNORE[op])
                def fixup(literal, flags=flags):
                    return _sre.getlower(literal, flags)
            else:
                emit(op)
                fixup = None
            skip = _len(code); emit(0)
            _compile_charset(av, flags, code, fixup, fixes)
            code[skip] = _len(code) - skip
        elif op is ANY:
            if flags & SRE_FLAG_DOTALL:
                emit(ANY_ALL)
            else:
                emit(ANY)
        elif op in REPEATING_CODES:
            if flags & SRE_FLAG_TEMPLATE:
                raise error("internal: unsupported template operator %r" % (op,))
            elif _simple(av) and op is not REPEAT:
                if op is MAX_REPEAT:
                    emit(REPEAT_ONE)
                else:
                    emit(MIN_REPEAT_ONE)
                skip = _len(code); emit(0)
                emit(av[0])
                emit(av[1])
                _compile(code, av[2], flags)
                emit(SUCCESS)
                code[skip] = _len(code) - skip
            else:
                emit(REPEAT)
                skip = _len(code); emit(0)
                emit(av[0])
                emit(av[1])
                _compile(code, av[2], flags)
                code[skip] = _len(code) - skip
                if op is MAX_REPEAT:
                    emit(MAX_UNTIL)
                else:
                    emit(MIN_UNTIL)
        elif op is SUBPATTERN:
            if av[0]:
                emit(MARK)
                emit((av[0]-1)*2)
            # _compile_info(code, av[1], flags)
            _compile(code, av[1], flags)
            if av[0]:
                emit(MARK)
                emit((av[0]-1)*2+1)
        elif op in SUCCESS_CODES:
            emit(op)
        elif op in ASSERT_CODES:
            emit(op)
            skip = _len(code); emit(0)
            if av[0] >= 0:
                emit(0) # look ahead
            else:
                lo, hi = av[1].getwidth()
                if lo != hi:
                    raise error("look-behind requires fixed-width pattern")
                emit(lo) # look behind
            _compile(code, av[1], flags)
            emit(SUCCESS)
            code[skip] = _len(code) - skip
        elif op is CALL:
            emit(op)
            skip = _len(code); emit(0)
            _compile(code, av, flags)
            emit(SUCCESS)
            code[skip] = _len(code) - skip
        elif op is AT:
            emit(op)
            if flags & SRE_FLAG_MULTILINE:
                av = AT_MULTILINE.get(av, av)
            if flags & SRE_FLAG_LOCALE:
                av = AT_LOCALE.get(av, av)
            elif flags & SRE_FLAG_UNICODE:
                av = AT_UNICODE.get(av, av)
            emit(av)
        elif op is BRANCH:
            emit(op)
            tail = []
            tailappend = tail.append
            for av in av[1]:
                skip = _len(code); emit(0)
                # _compile_info(code, av, flags)
                _compile(code, av, flags)
                emit(JUMP)
                tailappend(_len(code)); emit(0)
                code[skip] = _len(code) - skip
            emit(FAILURE) # end of branch
            for tail in tail:
                code[tail] = _len(code) - tail
        elif op is CATEGORY:
            emit(op)
            if flags & SRE_FLAG_LOCALE:
                av = CH_LOCALE[av]
            elif flags & SRE_FLAG_UNICODE:
                av = CH_UNICODE[av]
            emit(av)
        elif op is GROUPREF:
            if flags & SRE_FLAG_IGNORECASE:
                emit(OP_IGNORE[op])
            else:
                emit(op)
            emit(av-1)
        elif op is GROUPREF_EXISTS:
            emit(op)
            emit(av[0]-1)
            skipyes = _len(code); emit(0)
            _compile(code, av[1], flags)
            if av[2]:
                emit(JUMP)
                skipno = _len(code); emit(0)
                code[skipyes] = _len(code) - skipyes + 1
                _compile(code, av[2], flags)
                code[skipno] = _len(code) - skipno
            else:
                code[skipyes] = _len(code) - skipyes + 1
        else:
            raise error("internal: unsupported operand type %r" % (op,))
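
# Illustrative sketch of the opcode stream the loop above produces.  Assuming
# a parsed pattern consisting of the single tuple (LITERAL, 97) -- a plain
# "a" -- compiled with flags == 0, the LITERAL branch just appends the opcode
# and its operand:
#
#     code = []
#     _compile(code, [(LITERAL, 97)], 0)
#     # code is now [LITERAL, 97]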

def _compile_charset(charset, flags, code, fixup=None, fixes=None):
    # compile charset subprogram
    emit = code.append
    for op, av in _optimize_charset(charset, fixup, fixes):
        emit(op)
        if op is NEGATE:
            pass
        elif op is LITERAL:
            emit(av)
        elif op is RANGE or op is RANGE_IGNORE:
            emit(av[0])
            emit(av[1])
        elif op is CHARSET:
            code.extend(av)
        elif op is BIGCHARSET:
            code.extend(av)
        elif op is CATEGORY:
            if flags & SRE_FLAG_LOCALE:
                emit(CH_LOCALE[av])
            elif flags & SRE_FLAG_UNICODE:
                emit(CH_UNICODE[av])
            else:
                emit(av)
        else:
            raise error("internal: unsupported set operator %r" % (op,))
    emit(FAILURE)

def _optimize_charset(charset, fixup, fixes):
    # internal: optimize character set
    out = []
    tail = []
    charmap = bytearray(256)
    for op, av in charset:
        while True:
            try:
                if op is LITERAL:
                    if fixup:
                        lo = fixup(av)
                        charmap[lo] = 1
                        if fixes and lo in fixes:
                            for k in fixes[lo]:
                                charmap[k] = 1
                    else:
                        charmap[av] = 1
                elif op is RANGE:
                    r = range(av[0], av[1]+1)
                    if fixup:
                        r = map(fixup, r)
                    if fixup and fixes:
                        for i in r:
                            charmap[i] = 1
                            if i in fixes:
                                for k in fixes[i]:
                                    charmap[k] = 1
                    else:
                        for i in r:
                            charmap[i] = 1
                elif op is NEGATE:
                    out.append((op, av))
                else:
                    tail.append((op, av))
            except IndexError:
                if len(charmap) == 256:
                    # character set contains non-UCS1 character codes
                    charmap += b'\0' * 0xff00
                    continue
                # Character set contains non-BMP character codes.
                # There are only two ranges of cased non-BMP characters:
                # 10400-1044F (Deseret) and 118A0-118DF (Warang Citi),
                # and for both ranges RANGE_IGNORE works.
                if fixup and op is RANGE:
                    op = RANGE_IGNORE
                tail.append((op, av))
            break

    # compress character map
    runs = []
    q = 0
    while True:
        p = charmap.find(1, q)
        if p < 0:
            break
        if len(runs) >= 2:
            runs = None
            break
        q = charmap.find(0, p)
        if q < 0:
            runs.append((p, len(charmap)))
            break
        runs.append((p, q))
    if runs is not None:
        # use literal/range
        for p, q in runs:
            if q - p == 1:
                out.append((LITERAL, p))
            else:
                out.append((RANGE, (p, q - 1)))
        out += tail
        # if the case was changed or new representation is more compact
        if fixup or len(out) < len(charset):
            return out
        # else original character set is good enough
        return charset

    # use bitmap
    if len(charmap) == 256:
        data = _mk_bitmap(charmap)
        out.append((CHARSET, data))
        out += tail
        return out

    # To represent a big charset, first a bitmap of all characters in the
    # set is constructed.  Then, this bitmap is sliced into chunks of 256
    # characters, duplicate chunks are eliminated, and each chunk is
    # given a number.  In the compiled expression, the charset is
    # represented by a 32-bit word sequence, consisting of one word for
    # the number of different chunks, a sequence of 256 bytes (64 words)
    # of chunk numbers indexed by their original chunk position, and a
    # sequence of 256-bit chunks (8 words each).

    # Compression is normally good: in a typical charset, large ranges of
    # Unicode will be either completely excluded (e.g. if only cyrillic
    # letters are to be matched), or completely included (e.g. if large
    # subranges of Kanji match).  These ranges will be represented by
    # chunks of all one-bits or all zero-bits.

    # Matching can be also done efficiently: the more significant byte of
    # the Unicode character is an index into the chunk number, and the
    # less significant byte is a bit index in the chunk (just like the
    # CHARSET matching).
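
    # Rough illustration (a hedged sketch, not the output of a real run): for
    # a class such as [0-9a-zа-я] the 0x10000-character bitmap has three
    # distinct 256-character chunks -- one holding the ASCII runs, one holding
    # the Cyrillic run, and the shared all-zero chunk -- so the emitted data
    # is laid out as
    #
    #     [3]              # number of distinct chunks
    #     + 64 words       # 256 one-byte chunk indices (mapping below)
    #     + 3 * 8 words    # one 256-bit bitmap per distinct chunk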

    charmap = bytes(charmap) # should be hashable
    comps = {}
    mapping = bytearray(256)
    block = 0
    data = bytearray()
    for i in range(0, 65536, 256):
        chunk = charmap[i: i + 256]
        if chunk in comps:
            mapping[i // 256] = comps[chunk]
        else:
            mapping[i // 256] = comps[chunk] = block
            block += 1
            data += chunk
    data = _mk_bitmap(data)
    data[0:0] = [block] + _bytes_to_codes(mapping)
    out.append((BIGCHARSET, data))
    out += tail
    return out

_CODEBITS = _sre.CODESIZE * 8
MAXCODE = (1 << _CODEBITS) - 1
_BITS_TRANS = b'0' + b'1' * 255

def _mk_bitmap(bits, _CODEBITS=_CODEBITS, _int=int):
    s = bits.translate(_BITS_TRANS)[::-1]
    return [_int(s[i - _CODEBITS: i], 2)
            for i in range(len(s), 0, -_CODEBITS)]
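
# Worked example (assuming _sre.CODESIZE == 4, i.e. 32-bit code words): a
# 256-byte charmap with only positions 0 and 65 ("A") set becomes eight
# words in which bit 0 of word 0 and bit 1 of word 2 are set:
#
#     _mk_bitmap(bytes([1] + [0]*64 + [1] + [0]*190))
#     # -> [1, 0, 2, 0, 0, 0, 0, 0]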

def _bytes_to_codes(b):
    # Convert block indices to word array
    a = memoryview(b).cast('I')
    assert a.itemsize == _sre.CODESIZE
    assert len(a) * a.itemsize == len(b)
    return a.tolist()

def _simple(av):
    # check if av is a "simple" operator
    lo, hi = av[2].getwidth()
    return lo == hi == 1 and av[2][0][0] != SUBPATTERN

def _generate_overlap_table(prefix):
    """
    Generate an overlap table for the following prefix.
    An overlap table is a table of the same size as the prefix which
    informs about the potential self-overlap for each index in the prefix:
        - if overlap[i] == 0, prefix[i:] can't overlap prefix[0:...]
        - if overlap[i] == k with 0 < k <= i, prefix[i-k+1:i+1] overlaps with
          prefix[0:k]
    """
    table = [0] * len(prefix)
    for i in range(1, len(prefix)):
        idx = table[i - 1]
        while prefix[i] != prefix[idx]:
            if idx == 0:
                table[i] = 0
                break
            idx = table[idx - 1]
        else:
            table[i] = idx + 1
    return table
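
# Example (character codes written as letters for readability): the prefix
# "ababa" self-overlaps like the classic KMP failure function:
#
#     _generate_overlap_table([ord(c) for c in "ababa"])
#     # -> [0, 0, 1, 2, 3]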

def _compile_info(code, pattern, flags):
    # internal: compile an info block.  in the current version,
    # this contains min/max pattern width, and an optional literal
    # prefix or a character map
    lo, hi = pattern.getwidth()
    if hi > MAXCODE:
        hi = MAXCODE
    if lo == 0:
        code.extend([INFO, 4, 0, lo, hi])
        return
    # look for a literal prefix
    prefix = []
    prefixappend = prefix.append
    prefix_skip = 0
    charset = [] # not used
    charsetappend = charset.append
    if not (flags & SRE_FLAG_IGNORECASE):
        # look for literal prefix
        for op, av in pattern.data:
            if op is LITERAL:
                if len(prefix) == prefix_skip:
                    prefix_skip = prefix_skip + 1
                prefixappend(av)
            elif op is SUBPATTERN and len(av[1]) == 1:
                op, av = av[1][0]
                if op is LITERAL:
                    prefixappend(av)
                else:
                    break
            else:
                break
        # if no prefix, look for charset prefix
        if not prefix and pattern.data:
            op, av = pattern.data[0]
            if op is SUBPATTERN and av[1]:
                op, av = av[1][0]
                if op is LITERAL:
                    charsetappend((op, av))
                elif op is BRANCH:
                    c = []
                    cappend = c.append
                    for p in av[1]:
                        if not p:
                            break
                        op, av = p[0]
                        if op is LITERAL:
                            cappend((op, av))
                        else:
                            break
                    else:
                        charset = c
            elif op is BRANCH:
                c = []
                cappend = c.append
                for p in av[1]:
                    if not p:
                        break
                    op, av = p[0]
                    if op is LITERAL:
                        cappend((op, av))
                    else:
                        break
                else:
                    charset = c
            elif op is IN:
                charset = av
##     if prefix:
##         print("*** PREFIX", prefix, prefix_skip)
##     if charset:
##         print("*** CHARSET", charset)
    # add an info block
    emit = code.append
    emit(INFO)
    skip = len(code); emit(0)
    # literal flag
    mask = 0
    if prefix:
        mask = SRE_INFO_PREFIX
        if len(prefix) == prefix_skip == len(pattern.data):
            mask = mask | SRE_INFO_LITERAL
    elif charset:
        mask = mask | SRE_INFO_CHARSET
    emit(mask)
    # pattern length
    if lo < MAXCODE:
        emit(lo)
    else:
        emit(MAXCODE)
        prefix = prefix[:MAXCODE]
    emit(min(hi, MAXCODE))
    # add literal prefix
    if prefix:
        emit(len(prefix)) # length
        emit(prefix_skip) # skip
        code.extend(prefix)
        # generate overlap table
        code.extend(_generate_overlap_table(prefix))
    elif charset:
        _compile_charset(charset, flags, code)
    code[skip] = len(code) - skip
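
# Illustrative trace (hedged, derived by following the code above): for the
# parsed pattern "abc", compiled into an empty code list, the info block is
#
#     [INFO, 12,                           # opcode, skip to end of block
#      SRE_INFO_PREFIX | SRE_INFO_LITERAL, # literal flag mask
#      3, 3,                               # min/max pattern width
#      3, 3,                               # prefix length, prefix_skip
#      97, 98, 99,                         # the prefix "abc"
#      0, 0, 0]                            # overlap table for the prefix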

def isstring(obj):
    return isinstance(obj, (str, bytes))

def _code(p, flags):

    flags = p.pattern.flags | flags
    code = []

    # compile info block
    _compile_info(code, p, flags)

    # compile the pattern
    _compile(code, p.data, flags)

    code.append(SUCCESS)

    return code

def compile(p, flags=0):
    # internal: convert pattern list to internal format

    if isstring(p):
        pattern = p
        p = sre_parse.parse(p, flags)
    else:
        pattern = None

    code = _code(p, flags)

    # print(code)

    # map in either direction
    groupindex = p.pattern.groupdict
    indexgroup = [None] * p.pattern.groups
    for k, i in groupindex.items():
        indexgroup[i] = k

    return _sre.compile(
        pattern, flags | p.pattern.flags, code,
        p.pattern.groups-1,
        groupindex, indexgroup
        )
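
# Typical use (a sketch; the public entry point is re.compile(), which ends
# up here, but the module can be driven directly):
#
#     import sre_compile
#     pat = sre_compile.compile("a[0-9]+", 0)
#     pat.match("a42")   # returns a match object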