# Natural Language Toolkit: Probabilistic Chart Parsers
#
# Copyright (C) 2001-2019 NLTK Project
# Author: Edward Loper <edloper@gmail.com>
#         Steven Bird <stevenbird1@gmail.com>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
Classes and interfaces for associating probabilities with tree
structures that represent the internal organization of a text.  The
probabilistic parser module defines ``BottomUpProbabilisticChartParser``.

``BottomUpProbabilisticChartParser`` is an abstract class that implements
a bottom-up chart parser for ``PCFG`` grammars.  It maintains a queue of
edges, and adds them to the chart one at a time.  The ordering of this
queue is based on the probabilities associated with the edges, allowing
the parser to expand more likely edges before less likely ones.  Each
subclass implements a different queue ordering, producing different
search strategies.  Currently the following subclasses are defined:

  - ``InsideChartParser`` searches edges in decreasing order of
    their trees' inside probabilities.
  - ``RandomChartParser`` searches edges in random order.
  - ``LongestChartParser`` searches edges in decreasing order of their
    location's length.

The ``BottomUpProbabilisticChartParser`` constructor has an optional
argument beam_size.  If non-zero, this controls the size of the beam
(aka the edge queue).  This option is most useful with InsideChartParser.
"""
from __future__ import print_function, unicode_literals

##//////////////////////////////////////////////////////
##  Bottom-Up PCFG Chart Parser
##//////////////////////////////////////////////////////

# [XX] This might not be implemented quite right -- it would be better
# to associate probabilities with child pointer lists.

import random
from functools import reduce

from nltk.tree import Tree, ProbabilisticTree
from nltk.grammar import Nonterminal, PCFG
from nltk.parse.api import ParserI
from nltk.parse.chart import Chart, LeafEdge, TreeEdge, AbstractChartRule
from nltk.compat import python_2_unicode_compatible
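
# Illustrative usage sketch (editorial addition, not part of the original
# module): parse a short sentence with ``InsideChartParser`` and inspect the
# resulting trees.  The grammar below is a hypothetical toy PCFG chosen only
# to make the example self-contained; ``demo()`` at the end of this file
# gives a fuller comparison of the parsers.
#
#     from nltk.grammar import PCFG
#     from nltk.parse.pchart import InsideChartParser
#
#     toy = PCFG.fromstring("""
#         S -> NP VP [1.0]
#         NP -> 'John' [0.5] | 'Mary' [0.5]
#         VP -> V NP [1.0]
#         V -> 'saw' [1.0]
#     """)
#     parser = InsideChartParser(toy, beam_size=0)
#     for tree in parser.parse('John saw Mary'.split()):
#         print(tree.prob(), tree)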


# Probabilistic edges
class ProbabilisticLeafEdge(LeafEdge):
    def prob(self):
        return 1.0


class ProbabilisticTreeEdge(TreeEdge):
    def __init__(self, prob, *args, **kwargs):
        TreeEdge.__init__(self, *args, **kwargs)
        self._prob = prob
        # two edges with different probabilities are not equal.
        self._comparison_key = (self._comparison_key, prob)

    def prob(self):
        return self._prob

    @staticmethod
    def from_production(production, index, p):
        return ProbabilisticTreeEdge(
            p, (index, index), production.lhs(), production.rhs(), 0
        )
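
# Note: ``from_production(prod, i, p)`` builds a zero-width edge spanning
# [i:i] with its dot at position 0 and probability ``p`` (the production's
# probability).  The bottom-up predict rule below uses it to introduce a
# production at the start position of a completed edge whose label matches
# the production's first right-hand-side symbol.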


# Rules using probabilistic edges
class ProbabilisticBottomUpInitRule(AbstractChartRule):
    NUM_EDGES = 0

    def apply(self, chart, grammar):
        for index in range(chart.num_leaves()):
            new_edge = ProbabilisticLeafEdge(chart.leaf(index), index)
            if chart.insert(new_edge, ()):
                yield new_edge


class ProbabilisticBottomUpPredictRule(AbstractChartRule):
    NUM_EDGES = 1

    def apply(self, chart, grammar, edge):
        if edge.is_incomplete():
            return
        for prod in grammar.productions():
            if edge.lhs() == prod.rhs()[0]:
                new_edge = ProbabilisticTreeEdge.from_production(
                    prod, edge.start(), prod.prob()
                )
                if chart.insert(new_edge, ()):
                    yield new_edge


class ProbabilisticFundamentalRule(AbstractChartRule):
    NUM_EDGES = 2

    def apply(self, chart, grammar, left_edge, right_edge):
        # Make sure the rule is applicable.
        if not (
            left_edge.end() == right_edge.start()
            and left_edge.nextsym() == right_edge.lhs()
            and left_edge.is_incomplete()
            and right_edge.is_complete()
        ):
            return

        # Construct the new edge.
        p = left_edge.prob() * right_edge.prob()
        new_edge = ProbabilisticTreeEdge(
            p,
            span=(left_edge.start(), right_edge.end()),
            lhs=left_edge.lhs(),
            rhs=left_edge.rhs(),
            dot=left_edge.dot() + 1,
        )

        # Add it to the chart, with appropriate child pointers.
        changed_chart = False
        for cpl1 in chart.child_pointer_lists(left_edge):
            if chart.insert(new_edge, cpl1 + (right_edge,)):
                changed_chart = True

        # If we changed the chart, then generate the edge.
        if changed_chart:
            yield new_edge
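
# Illustrative note on the rule above (hypothetical numbers): if an
# incomplete edge ``VP -> V * NP`` with probability 0.7 over [1:2] is
# combined with a complete ``NP`` edge of probability 0.5 over [2:4], the
# fundamental rule yields ``VP -> V NP *`` over [1:4] with probability
# 0.7 * 0.5 = 0.35, i.e. the product of the two edges' probabilities.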


@python_2_unicode_compatible
class SingleEdgeProbabilisticFundamentalRule(AbstractChartRule):
    NUM_EDGES = 1

    _fundamental_rule = ProbabilisticFundamentalRule()

    def apply(self, chart, grammar, edge1):
        fr = self._fundamental_rule
        if edge1.is_incomplete():
            # edge1 = left_edge; edge2 = right_edge
            for edge2 in chart.select(
                start=edge1.end(), is_complete=True, lhs=edge1.nextsym()
            ):
                for new_edge in fr.apply(chart, grammar, edge1, edge2):
                    yield new_edge
        else:
            # edge2 = left_edge; edge1 = right_edge
            for edge2 in chart.select(
                end=edge1.start(), is_complete=False, nextsym=edge1.lhs()
            ):
                for new_edge in fr.apply(chart, grammar, edge2, edge1):
                    yield new_edge

    def __str__(self):
        return 'Fundamental Rule'


class BottomUpProbabilisticChartParser(ParserI):
    """
    An abstract bottom-up parser for ``PCFG`` grammars that uses a ``Chart``
    to record partial results.  ``BottomUpProbabilisticChartParser`` maintains
    a queue of edges that can be added to the chart.  This queue is
    initialized with edges for each token in the text that is being
    parsed.  ``BottomUpProbabilisticChartParser`` inserts these edges into
    the chart one at a time, starting with the most likely edges, and
    proceeding to less likely edges.  For each edge that is added to
    the chart, it may become possible to insert additional edges into
    the chart; these are added to the queue.  This process continues
    until enough complete parses have been generated, or until the
    queue is empty.

    The sorting order for the queue is not specified by
    ``BottomUpProbabilisticChartParser``.  Different sorting orders will
    result in different search strategies.  The sorting order for the
    queue is defined by the method ``sort_queue``; subclasses are required
    to provide a definition for this method.

    :type _grammar: PCFG
    :ivar _grammar: The grammar used to parse sentences.
    :type _trace: int
    :ivar _trace: The level of tracing output that should be generated
        when parsing a text.
    """

    def __init__(self, grammar, beam_size=0, trace=0):
        """
        Create a new ``BottomUpProbabilisticChartParser`` that uses
        ``grammar`` to parse texts.

        :type grammar: PCFG
        :param grammar: The grammar used to parse texts.
        :type beam_size: int
        :param beam_size: The maximum length for the parser's edge queue.
        :type trace: int
        :param trace: The level of tracing that should be used when
            parsing a text.  ``0`` will generate no tracing output;
            and higher numbers will produce more verbose tracing
            output.
        """
        if not isinstance(grammar, PCFG):
            raise ValueError("The grammar must be probabilistic (PCFG)")
        self._grammar = grammar
        self.beam_size = beam_size
        self._trace = trace

    def grammar(self):
        return self._grammar

    def trace(self, trace=2):
        """
        Set the level of tracing output that should be generated when
        parsing a text.

        :type trace: int
        :param trace: The trace level.  A trace level of ``0`` will
            generate no tracing output; and higher trace levels will
            produce more verbose tracing output.
        :rtype: None
        """
        self._trace = trace

    # TODO: change this to conform more with the standard ChartParser
    def parse(self, tokens):
        self._grammar.check_coverage(tokens)
        chart = Chart(list(tokens))
        grammar = self._grammar

        # Chart parser rules.
        bu_init = ProbabilisticBottomUpInitRule()
        bu = ProbabilisticBottomUpPredictRule()
        fr = SingleEdgeProbabilisticFundamentalRule()

        # Our queue
        queue = []

        # Initialize the chart.
        for edge in bu_init.apply(chart, grammar):
            if self._trace > 1:
                print(
                    '  %-50s [%s]'
                    % (chart.pretty_format_edge(edge, width=2), edge.prob())
                )
            queue.append(edge)

        while len(queue) > 0:
            # Re-sort the queue.
            self.sort_queue(queue, chart)

            # Prune the queue to the correct size if a beam was defined
            if self.beam_size:
                self._prune(queue, chart)

            # Get the best edge.
            edge = queue.pop()
            if self._trace > 0:
                print(
                    '  %-50s [%s]'
                    % (chart.pretty_format_edge(edge, width=2), edge.prob())
                )

            # Apply BU & FR to it.
            queue.extend(bu.apply(chart, grammar, edge))
            queue.extend(fr.apply(chart, grammar, edge))

        # Get a list of complete parses.
        parses = list(chart.parses(grammar.start(), ProbabilisticTree))

        # Assign probabilities to the trees.
        prod_probs = {}
        for prod in grammar.productions():
            prod_probs[prod.lhs(), prod.rhs()] = prod.prob()
        for parse in parses:
            self._setprob(parse, prod_probs)

        # Sort by probability
        parses.sort(reverse=True, key=lambda tree: tree.prob())

        return iter(parses)

    def _setprob(self, tree, prod_probs):
        if tree.prob() is not None:
            return

        # Get the prob of the CFG production.
        lhs = Nonterminal(tree.label())
        rhs = []
        for child in tree:
            if isinstance(child, Tree):
                rhs.append(Nonterminal(child.label()))
            else:
                rhs.append(child)
        prob = prod_probs[lhs, tuple(rhs)]

        # Get the probs of children.
        for child in tree:
            if isinstance(child, Tree):
                self._setprob(child, prod_probs)
                prob *= child.prob()

        tree.set_prob(prob)

    def sort_queue(self, queue, chart):
        """
        Sort the given queue of ``Edge`` objects, placing the edge that
        should be tried first at the end of the queue (``parse`` pops edges
        off the end of the queue).  This method is called before each edge
        is popped from the queue.

        :param queue: The queue of ``Edge`` objects to sort.  Each edge in
            this queue is an edge that could be added to the chart by
            the fundamental rule; but that has not yet been added.
        :type queue: list(Edge)
        :param chart: The chart being used to parse the text.  This
            chart can be used to provide extra information for sorting
            the queue.
        :type chart: Chart
        :rtype: None
        """
        raise NotImplementedError()

    def _prune(self, queue, chart):
        """Discard items in the queue if the queue is longer than the beam."""
        if len(queue) > self.beam_size:
            split = len(queue) - self.beam_size
            if self._trace > 2:
                for edge in queue[:split]:
                    print('  %-50s [DISCARDED]' % chart.pretty_format_edge(edge, 2))
            del queue[:split]
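
# Illustrative note on pruning (hypothetical numbers): with ``beam_size=3``
# and five edges in the queue after sorting, ``_prune`` above drops the
# first two entries, i.e. the edges that ``sort_queue`` ranked as least
# promising, keeping only the three edges nearest the end of the queue
# (the end is where ``parse`` pops from).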


class InsideChartParser(BottomUpProbabilisticChartParser):
    """
    A bottom-up parser for ``PCFG`` grammars that tries edges in descending
    order of the inside probabilities of their trees.  The "inside
    probability" of a tree is simply the probability of the entire tree,
    ignoring its context.  In particular, the inside probability of a tree
    generated by production *p* with children *c[1], c[2], ..., c[n]* is
    *P(p)P(c[1])P(c[2])...P(c[n])*; and the inside probability of a token
    is 1 if it is present in the text, and 0 if it is absent.

    This sorting order results in a type of lowest-cost-first search
    strategy.
    """

    # Inherit constructor.
    def sort_queue(self, queue, chart):
        """
        Sort the given queue of edges in ascending order of the inside
        probabilities of the edges' trees, so that the edge with the
        highest inside probability ends up at the end of the queue and
        is popped first.

        :param queue: The queue of ``Edge`` objects to sort.  Each edge in
            this queue is an edge that could be added to the chart by
            the fundamental rule; but that has not yet been added.
        :type queue: list(Edge)
        :param chart: The chart being used to parse the text.  This
            chart can be used to provide extra information for sorting
            the queue.
        :type chart: Chart
        :rtype: None
        """
        queue.sort(key=lambda edge: edge.prob())
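
# Illustrative note on inside probabilities, using productions from the
# ``toy_pcfg1`` grammar in ``demo()`` below: a tree built by
# ``VP -> V NP [0.7]`` whose children have inside probabilities 0.65
# (``V -> 'saw'``) and 0.1 (``NP -> 'John'``) has inside probability
# 0.7 * 0.65 * 0.1 = 0.0455, i.e. the production probability times the
# children's inside probabilities, exactly as ``_setprob`` computes it.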


# Eventually, this will become some sort of inside-outside parser:
# class InsideOutsideParser(BottomUpProbabilisticChartParser):
#     def __init__(self, grammar, trace=0):
#         # Inherit docs.
#         BottomUpProbabilisticChartParser.__init__(self, grammar, trace)
#
#         # Find the best path from S to each nonterminal
#         bestp = {}
#         for production in grammar.productions(): bestp[production.lhs()] = 0
#         bestp[grammar.start()] = 1.0
#
#         for i in range(len(grammar.productions())):
#             for production in grammar.productions():
#                 lhs = production.lhs()
#                 for elt in production.rhs():
#                     bestp[elt] = max(bestp[lhs]*production.prob(),
#                                      bestp.get(elt, 0))
#
#         self._bestp = bestp
#         for (k, v) in self._bestp.items(): print(k, v)
#
#     def _sortkey(self, edge):
#         return edge.structure()[PROB] * self._bestp[edge.lhs()]
#
#     def sort_queue(self, queue, chart):
#         queue.sort(key=self._sortkey)


class RandomChartParser(BottomUpProbabilisticChartParser):
    """
    A bottom-up parser for ``PCFG`` grammars that tries edges in random order.
    This sorting order results in a random search strategy.
    """

    # Inherit constructor
    def sort_queue(self, queue, chart):
        i = random.randint(0, len(queue) - 1)
        (queue[-1], queue[i]) = (queue[i], queue[-1])


class UnsortedChartParser(BottomUpProbabilisticChartParser):
    """
    A bottom-up parser for ``PCFG`` grammars that tries edges in whatever
    order they happen to occur in the queue (no sorting is performed).
    """

    # Inherit constructor
    def sort_queue(self, queue, chart):
        return


class LongestChartParser(BottomUpProbabilisticChartParser):
    """
    A bottom-up parser for ``PCFG`` grammars that tries longer edges before
    shorter ones.  This sorting order results in a type of best-first
    search strategy.
    """

    # Inherit constructor
    def sort_queue(self, queue, chart):
        queue.sort(key=lambda edge: edge.length())


##//////////////////////////////////////////////////////
##  Test Code
##//////////////////////////////////////////////////////


def demo(choice=None, draw_parses=None, print_parses=None):
    """
    A demonstration of the probabilistic parsers.  The user is
    prompted to select which demo to run; then each parser is run on
    the same demo, and a summary of the results is displayed.
    """
    import sys, time
    from nltk import tokenize
    from nltk.parse import pchart

    # Define two demos.  Each demo has a sentence and a grammar.
    toy_pcfg1 = PCFG.fromstring(
        """
    S -> NP VP [1.0]
    NP -> Det N [0.5] | NP PP [0.25] | 'John' [0.1] | 'I' [0.15]
    Det -> 'the' [0.8] | 'my' [0.2]
    N -> 'man' [0.5] | 'telescope' [0.5]
    VP -> VP PP [0.1] | V NP [0.7] | V [0.2]
    V -> 'ate' [0.35] | 'saw' [0.65]
    PP -> P NP [1.0]
    P -> 'with' [0.61] | 'under' [0.39]
    """
    )

    toy_pcfg2 = PCFG.fromstring(
        """
    S -> NP VP [1.0]
    VP -> V NP [.59]
    VP -> V [.40]
    VP -> VP PP [.01]
    NP -> Det N [.41]
    NP -> Name [.28]
    NP -> NP PP [.31]
    PP -> P NP [1.0]
    V -> 'saw' [.21]
    V -> 'ate' [.51]
    V -> 'ran' [.28]
    N -> 'boy' [.11]
    N -> 'cookie' [.12]
    N -> 'table' [.13]
    N -> 'telescope' [.14]
    N -> 'hill' [.5]
    Name -> 'Jack' [.52]
    Name -> 'Bob' [.48]
    P -> 'with' [.61]
    P -> 'under' [.39]
    Det -> 'the' [.41]
    Det -> 'a' [.31]
    Det -> 'my' [.28]
    """
    )

    demos = [
        ('I saw John with my telescope', toy_pcfg1),
        ('the boy saw Jack with Bob under the table with a telescope', toy_pcfg2),
    ]

    if choice is None:
        # Ask the user which demo they want to use.
        print()
        for i in range(len(demos)):
            print('%3s: %s' % (i + 1, demos[i][0]))
            print('     %r' % demos[i][1])
            print()
        print('Which demo (%d-%d)? ' % (1, len(demos)), end=' ')
        choice = int(sys.stdin.readline().strip()) - 1
    try:
        sent, grammar = demos[choice]
    except:
        print('Bad sentence number')
        return

    # Tokenize the sentence.
    tokens = sent.split()

    # Define a list of parsers.  We'll use all parsers.
    parsers = [
        pchart.InsideChartParser(grammar),
        pchart.RandomChartParser(grammar),
        pchart.UnsortedChartParser(grammar),
        pchart.LongestChartParser(grammar),
        pchart.InsideChartParser(grammar, beam_size=len(tokens) + 1),  # was BeamParser
    ]

    # Run the parsers on the tokenized sentence.
    times = []
    average_p = []
    num_parses = []
    all_parses = {}
    for parser in parsers:
        print('\ns: %s\nparser: %s\ngrammar: %s' % (sent, parser, grammar))
        parser.trace(3)
        t = time.time()
        parses = list(parser.parse(tokens))
        times.append(time.time() - t)
        p = reduce(lambda a, b: a + b.prob(), parses, 0) / len(parses) if parses else 0
        average_p.append(p)
        num_parses.append(len(parses))
        for p in parses:
            all_parses[p.freeze()] = 1

    # Print some summary statistics
    print()
    print('       Parser      Beam | Time (secs)   # Parses   Average P(parse)')
    print('------------------------+------------------------------------------')
    for i in range(len(parsers)):
        print(
            '%18s %4d |%11.4f%11d%19.14f'
            % (
                parsers[i].__class__.__name__,
                parsers[i].beam_size,
                times[i],
                num_parses[i],
                average_p[i],
            )
        )
    parses = all_parses.keys()
    if parses:
        p = reduce(lambda a, b: a + b.prob(), parses, 0) / len(parses)
    else:
        p = 0
    print('------------------------+------------------------------------------')
    print('%18s      |%11s%11d%19.14f' % ('(All Parses)', 'n/a', len(parses), p))

    if draw_parses is None:
        # Ask the user if we should draw the parses.
        print()
        print('Draw parses (y/n)? ', end=' ')
        draw_parses = sys.stdin.readline().strip().lower().startswith('y')
    if draw_parses:
        from nltk.draw.tree import draw_trees

        print('  please wait...')
        draw_trees(*parses)

    if print_parses is None:
        # Ask the user if we should print the parses.
        print()
        print('Print parses (y/n)? ', end=' ')
        print_parses = sys.stdin.readline().strip().lower().startswith('y')
    if print_parses:
        for parse in parses:
            print(parse)


if __name__ == '__main__':
    demo()