python 3.x - Markov analysis - role of return and recursion
I am working on the Markov analysis exercise from Think Python, and I don't understand the role of "return" in the code block below. As far as I know, when execution reaches return, the function exits immediately. Isn't it unnecessary in this case? There is a recursive call, random_text(n-i), before the code reaches the return statement, so wouldn't the function end anyway once the recursion finishes, i.e. once the loop is over? This may seem like a stupid question, but I'm a newbie in Python and the recursion stuff confuses me. I tried removing the return and the code still seems to run fine.
def random_text(n=100):
    start = random.choice(list(suffix_map.keys()))

    for i in range(n):
        suffixes = suffix_map.get(start, None)
        if suffixes == None:
            # if the start isn't in the map, we got to the end of the
            # original text, so we have to start again.
            random_text(n-i)
            return

        word = random.choice(suffixes)
        print(word, end=' ')
        start = shift(start, word)
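To show which line I mean, here is a stripped-down toy I wrote with the same if / recursive-call / return shape (my own made-up example and names, not from the book):

def count_up(n):
    # simplified stand-in for random_text: loop, hit a "dead end",
    # recurse for the remaining items, then return
    for i in range(n):
        if i == 3:               # stand-in for the "suffixes is None" dead end
            count_up(n - i)      # recursive call handles the remaining items
            return               # <- the return I'm asking about
        print(i, end=' ')

count_up(6)
# with the return:    0 1 2 0 1 2
# without the return: 0 1 2 0 1 2 4 5   (the outer loop keeps going at i=4)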
The full code is below; I understand what each function does.
from __future__ import print_function, division

import os
os.chdir(r"C:\Users\hoang-ngoc.anh\Documents\WinPython-64bit 3.4.4.2\notebooks\docs")

import sys
import string
import random

# global variables
suffix_map = {}        # map from prefixes to a list of suffixes
prefix = ()            # current tuple of words


def process_file(filename, order=2):
    """Reads a file and performs Markov analysis.

    filename: string
    order: integer number of words in the prefix

    returns: map from prefix to list of possible suffixes.
    """
    fp = open(filename)
    skip_gutenberg_header(fp)

    for line in fp:
        for word in line.rstrip().split():
            process_word(word, order)


def skip_gutenberg_header(fp):
    """Reads from fp until it finds the line that ends the header.

    fp: open file object
    """
    for line in fp:
        if line.startswith('*END*THE SMALL PRINT!'):
            break


def process_word(word, order=2):
    """Processes each word.

    word: string
    order: integer

    During the first few iterations, all we do is store up the words;
    after that we start adding entries to the dictionary.
    """
    global prefix
    if len(prefix) < order:
        prefix += (word,)
        return

    try:
        suffix_map[prefix].append(word)
    except KeyError:
        # if there is no entry for this prefix, make one
        suffix_map[prefix] = [word]

    prefix = shift(prefix, word)


def random_text(n=100):
    """Generates random words from the analyzed text.

    Starts with a random prefix from the dictionary.

    n: number of words to generate
    """
    # choose a random prefix (not weighted by frequency)
    start = random.choice(list(suffix_map.keys()))

    for i in range(n):
        suffixes = suffix_map.get(start, None)
        if suffixes == None:
            # if the start isn't in the map, we got to the end of the
            # original text, so we have to start again.
            random_text(n-i)
            return

        # choose a random suffix
        word = random.choice(suffixes)
        print(word, end=' ')
        start = shift(start, word)


def shift(t, word):
    """Forms a new tuple by removing the head and adding word to the tail.

    t: tuple of strings
    word: string

    Returns: tuple of strings
    """
    return t[1:] + (word,)


def main(script, filename='emma.txt', n=100, order=2):
    try:
        n = int(n)
        order = int(order)
    except ValueError:
        print('Usage: %s filename [# of words] [prefix length]' % script)
    else:
        process_file(filename, order)
        random_text(n)
        print()


if __name__ == '__main__':
    main(*sys.argv)
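In case it helps, here is a quick check I did in a fresh interpreter (after defining the functions above) with a tiny made-up line of text instead of emma.txt, to see what suffix_map ends up holding with order=2:

# feed a short made-up sentence word by word (my own example, not from the book)
for word in 'the bee is the bee of the bees'.split():
    process_word(word, order=2)

print(suffix_map)
# entries (in some order):
# ('the', 'bee'): ['is', 'of']
# ('bee', 'is'): ['the']
# ('is', 'the'): ['bee']
# ('bee', 'of'): ['the']
# ('of', 'the'): ['bees']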