Ticket #2594: template_whitespace-2006-08-22.diff

File template_whitespace-2006-08-22.diff, 5.3 KB (added by Gary Wilson <gary.wilson@…>, 9 years ago)
  • django/template/__init__.py

     
    6666TOKEN_TEXT = 0
    6767TOKEN_VAR = 1
    6868TOKEN_BLOCK = 2
     69TOKEN_ENDLINE = 3
    6970
    7071# template syntax constants
    7172FILTER_SEPARATOR = '|'
     
    8586UNKNOWN_SOURCE="<unknown source>"
    8687
    8788# match a variable or block tag and capture the entire tag, including start/end delimiters
    88 tag_re = re.compile('(%s.*?%s|%s.*?%s)' % (re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END),
     89tag_re = re.compile('(%s.*?%s|%s.*?%s|\n)' % (re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END),
    8990                                          re.escape(VARIABLE_TAG_START), re.escape(VARIABLE_TAG_END)))
     91whitespace_only_re = re.compile('^\s+$')
    9092
    9193# global dictionary of libraries that have been loaded using get_library
    9294libraries = {}
     
    157159def compile_string(template_string, origin):
    158160    "Compiles template_string into NodeList ready for rendering"
    159161    lexer = lexer_factory(template_string, origin)
    160     parser = parser_factory(lexer.tokenize())
     162    tokens = lexer.tokenize()
     163    tokens = trim_whitespace(tokens)
     164    parser = parser_factory(tokens)
    161165    return parser.parse()
    162166
     167def trim_whitespace(tokens):
     168    num_tokens = len(tokens)
     169    keep_tokens = []
     170    line_start = 0
     171    for pos, token in enumerate(tokens):
     172        is_endline = token.token_type is TOKEN_ENDLINE
     173        is_last_token = pos is (num_tokens-1)
     174        # If we have reached the end of a line or the end of the file then
     175        # process the line.
     176        if is_endline or is_last_token:
     177            # Add one to the slice to include the endline token we are currently at.
     178            keep_tokens += get_line_tokens(tokens[line_start:pos+1])
     179            # The next line starts at the next token.
     180            line_start = pos + 1
     181            continue
     182    return keep_tokens
     183
     184def get_line_tokens(tokens):
     185    line_has_block = False
     186    # We don't need to look at the last (TOKEN_ENDLINE) token.
     187    for token in tokens[:-1]:
     188        if token.token_type is TOKEN_TEXT:
     189            # Keep the line if we have non-whitespace text.
     190            if not whitespace_only_re.match(token.contents):
     191                return tokens
     192        elif token.token_type is TOKEN_BLOCK:
     193            line_has_block = True
     194    # If we are at this point then the line only contains whitespace, blocks,
     195    # and variables.  Continuing on...
     196    # If the line doesn't have blocks then it must only have variables and
     197    # whitespace, return the whole line.  This also catches the case where
     198    # the line consists of only a TOKEN_ENDLINE.
     199    if not line_has_block:
     200        return tokens
     201    # At this point we have only blocks and/or text and know we don't want the endline.
     202    del tokens[-1]
     203    # We also don't wan't beginning and trailing whitespace on the line.
     204    start = first_non_whitespace_token(tokens)
     205    tokens.reverse()
     206    from_end = first_non_whitespace_token(tokens)
     207    tokens.reverse()
     208    return tokens[start:len(tokens)-from_end]
     209
     210def first_non_whitespace_token(tokens):
     211    pos = 0
     212    for token in tokens:
     213        if token.token_type is TOKEN_TEXT and whitespace_only_re.match(token.contents):
     214            pos += 1
     215        else:
     216            break
     217    return pos
     218
    163219class Token(object):
    164220    def __init__(self, token_type, contents):
    165         "The token_type must be TOKEN_TEXT, TOKEN_VAR or TOKEN_BLOCK"
     221        "The token_type must be TOKEN_TEXT, TOKEN_VAR or TOKEN_BLOCK, or TOKEN_ENDLINE."
    166222        self.token_type, self.contents = token_type, contents
    167223
    168224    def __str__(self):
    169225        return '<%s token: "%s...">' % \
    170             ({TOKEN_TEXT: 'Text', TOKEN_VAR: 'Var', TOKEN_BLOCK: 'Block'}[self.token_type],
     226            ({TOKEN_TEXT: 'Text', TOKEN_VAR: 'Var', TOKEN_BLOCK: 'Block', TOKEN_ENDLINE: 'Endline'}[self.token_type],
    171227            self.contents[:20].replace('\n', ''))
    172228
    173229    def split_contents(self):
     
    190246            token = Token(TOKEN_VAR, token_string[len(VARIABLE_TAG_START):-len(VARIABLE_TAG_END)].strip())
    191247        elif token_string.startswith(BLOCK_TAG_START):
    192248            token = Token(TOKEN_BLOCK, token_string[len(BLOCK_TAG_START):-len(BLOCK_TAG_END)].strip())
     249        elif token_string == '\n':
     250            token = Token(TOKEN_ENDLINE, token_string)
    193251        else:
    194252            token = Token(TOKEN_TEXT, token_string)
    195253        return token
     
    233291            token = self.next_token()
    234292            if token.token_type == TOKEN_TEXT:
    235293                self.extend_nodelist(nodelist, TextNode(token.contents), token)
     294            elif token.token_type == TOKEN_ENDLINE:
     295                self.extend_nodelist(nodelist, EndlineNode(token.contents), token)
    236296            elif token.token_type == TOKEN_VAR:
    237297                if not token.contents:
    238298                    self.empty_variable(token)
     
    726786    def render(self, context):
    727787        return self.s
    728788
     789class EndlineNode(Node):
     790    def __init__(self, s):
     791        self.s = s
     792
     793    def __repr__(self):
     794        return "<Endline Node: %r>" % self.s
     795
     796    def render(self, context):
     797        return self.s
     798
    729799class VariableNode(Node):
    730800    def __init__(self, filter_expression):
    731801        self.filter_expression = filter_expression
Back to Top