Module:   dfmc-macro-expander
Synopsis: Drive the parser to parse constraints during matching.
Author:   Keith Playford
Copyright:    Original Code is Copyright (c) 1995-2004 Functional Objects, Inc.
              All rights reserved.
License:      Functional Objects Library Public License Version 1.0
Dual-license: GNU Lesser General Public License
Warranty:     Distributed WITHOUT WARRANTY OF ANY KIND

// NOTE(review): the type names after "::" (and the classes used in
// instance? / false-or) appear to have been stripped from this file,
// presumably by angle-bracket-eating tooling.  <fragment>, <list>,
// <function> and <nested-fragment> are restored below from the fragment
// accessors actually called -- confirm against the original dfmc sources.

// Parse the longest prefix of the token sequence (constraint-start
// followed by tokens) that reads as a complete constraint.  On a parse
// failure, backtrack by retrying with one fewer token than was consumed
// at the failure point, until either a prefix parses or no tokens are
// left.  Returns (failure, remains, parsed): on success failure is #f,
// remains is the unconsumed token list and parsed is the parsed form;
// on total failure (#t, #f, #f).
define method parse-constraint
    (constraint-start :: <fragment>, tokens :: <list>)
 => (failure, remains, parsed)
  let all-tokens = pair(constraint-start, tokens);
  let token-stack = all-tokens;
  let nesting-stack = #();
  // TODO: PERFORMANCE: If we only count top level tokens, we could win in
  // certain situations - if you fail anywhere within a nested fragment, no
  // subsequence of the nested fragment will win, so rewind back to the
  // next top level token.
  let limit = -1;          // max tokens to hand out; -1 means "no limit"
  let count = 0;           // tokens handed out so far on this attempt
  local method lexer () => (f :: <fragment>)
    if (count == limit)
      $end-constraint;
    elseif (empty?(token-stack))
      if (empty?(nesting-stack))
        $end-constraint;
      else
        // Pop back out of a nested fragment, resuming after it and
        // emitting the remembered close delimiter.
        let close/tokens = nesting-stack.head;
        nesting-stack := nesting-stack.tail;
        token-stack := close/tokens.tail;
        count := count + 1;
        close/tokens.head;
      end;
    else
      let next-token = token-stack.head;
      token-stack := token-stack.tail;
      count := count + 1;
      if (instance?(next-token, <nested-fragment>))
        // Descend into the nested fragment: remember where to resume and
        // which close delimiter to emit on the way out, then emit the
        // open delimiter now.
        nesting-stack
          := pair(pair(fragment-right-delimiter(next-token), token-stack),
                  nesting-stack);
        token-stack := fragment-nested-fragments(next-token);
        fragment-left-delimiter(next-token);
      else
        next-token
      end;
    end;
  end;
  block (return)
    while (#t)
      block (retry)
        // Reset the lexer state and attempt a parse; on error the parser
        // unwinds to retry and we shrink the limit below.
        token-stack := all-tokens;
        nesting-stack := #();
        count := 0;
        let form = re-read-fragments(lexer, on-error: retry);
        return(#f, token-stack, form);
      end block;
      // Warning: Do not optimise to (count > limit) here. Just because
      // all the tokens were consumed does not imply a substring won't
      // parse (e.g. { 1 + }).
      if (limit == 0)
        return(#t, #f, #f);
      else
        // Try one less token than at the failure point.
        limit := count - 1;
      end if;
    end while;
  end block;
end method;

// TODO: CORRECTNESS: The way boundedness is checked is buggy.

// As parse-constraint, but in addition stops handing out tokens as soon
// as a top-level (non-nested) token satisfies stop?(token, stop-arg).
// Returns (failure, remains, parsed) with the same success/failure
// protocol as parse-constraint.
define method parse-bounded-constraint
    (constraint-start :: <fragment>, tokens :: <list>,
     stop? :: <function>, stop-arg)
 => (failure, remains :: false-or(<list>), parsed)
  let all-tokens = pair(constraint-start, tokens);
  let token-stack = all-tokens;
  let nesting-stack = #();
  // TODO: PERFORMANCE: If we only count top level tokens, we could win in
  // certain situations - if you fail anywhere within a nested fragment, no
  // subsequence of the nested fragment will win, so rewind back to the
  // next top level token.
  let limit = -1;
  let count = 0;
  local method lexer () => (f :: <fragment>)
    if (count == limit)
      $end-constraint;
    elseif (empty?(token-stack))
      if (empty?(nesting-stack))
        $end-constraint;
      else
        // Pop back out of a nested fragment and emit its close delimiter.
        let close/tokens = nesting-stack.head;
        nesting-stack := nesting-stack.tail;
        token-stack := close/tokens.tail;
        count := count + 1;
        close/tokens.head;
      end;
    else
      // Peek before consuming: a stopping token must not be consumed.
      let next-token = token-stack.head;
      if (instance?(next-token, <nested-fragment>))
        token-stack := token-stack.tail;
        count := count + 1;
        nesting-stack
          := pair(pair(fragment-right-delimiter(next-token), token-stack),
                  nesting-stack);
        token-stack := fragment-nested-fragments(next-token);
        fragment-left-delimiter(next-token);
      elseif (nesting-stack == #() & stop?(next-token, stop-arg))
        // Top-level stopping token: end the constraint, leaving it
        // unconsumed on token-stack.
        $end-constraint;
      else
        token-stack := token-stack.tail;
        count := count + 1;
        next-token
      end;
    end;
  end;
  block (return)
    while (#t)
      block (retry)
        token-stack := all-tokens;
        nesting-stack := #();
        count := 0;
        let form = re-read-fragments(lexer, on-error: retry);
        return(#f, token-stack, form);
      end block;
      // Warning: Do not optimise to (count > limit) here. Just because
      // all the tokens were consumed does not imply a substring won't
      // parse (e.g. { 1 + }).
      if (limit == 0)
        return(#t, #f, #f);
      else
        // Try one less token than at the failure point.
        limit := count - 1;
      end if;
    end while;
  end block;
end method;

// As parse-bounded-constraint, but a failed parse is not retried on a
// shorter prefix -- any parse error propagates out of re-read-fragments.
// On success returns (#f, remaining-tokens, parsed-form).
define method parse-bounded-constraint-no-backtracking
    (constraint-start :: <fragment>, tokens :: <list>,
     stop? :: <function>, stop-arg)
 => (failure, remains :: false-or(<list>), parsed)
  let all-tokens = pair(constraint-start, tokens);
  let token-stack = all-tokens;
  let nesting-stack = #();
  // TODO: PERFORMANCE: If we only count top level tokens, we could win in
  // certain situations - if you fail anywhere within a nested fragment, no
  // subsequence of the nested fragment will win, so rewind back to the
  // next top level token.
  local method lexer () => (f :: <fragment>)
    if (empty?(token-stack))
      if (empty?(nesting-stack))
        $end-constraint;
      else
        // Pop back out of a nested fragment and emit its close delimiter.
        let close/tokens = nesting-stack.head;
        nesting-stack := nesting-stack.tail;
        token-stack := close/tokens.tail;
        close/tokens.head;
      end;
    else
      let next-token = token-stack.head;
      if (instance?(next-token, <nested-fragment>))
        token-stack := token-stack.tail;
        nesting-stack
          := pair(pair(fragment-right-delimiter(next-token), token-stack),
                  nesting-stack);
        token-stack := fragment-nested-fragments(next-token);
        fragment-left-delimiter(next-token);
      elseif (nesting-stack == #() & stop?(next-token, stop-arg))
        $end-constraint;
      else
        token-stack := token-stack.tail;
        next-token
      end;
    end;
  end;
  token-stack := all-tokens;
  nesting-stack := #();
  let form = re-read-fragments(lexer);
  values(#f, token-stack, form);
end method;

// Template parsing is simpler, not having delimited nested fragments
// to account for, and not requiring backtracking.
// NOTE(review): the warning-class names that should follow
// "define serious-program-warning" (angle-bracketed identifiers) appear to
// have been stripped from this file by angle-bracket-eating tooling, as do
// the type names after "::" below -- recover them from the original dfmc
// sources before compiling.

// Serious warning: a token in a macro expansion could not be parsed.
define serious-program-warning ()
  slot condition-token,
    required-init-keyword: token:;
  slot condition-template,
    required-init-keyword: template:;
  format-string "Unexpected token %= encountered in macro expansion: %=.";
  format-arguments token, template;
end serious-program-warning;

// Serious warning: a macro expansion ended before a complete form was read.
define serious-program-warning ()
  slot condition-template,
    required-init-keyword: template:;
  format-string "Unexpected end of macro expansion: %=.";
  format-arguments template;
end serious-program-warning;

// Parse a template's fragments (constraint-start followed by f*) as a
// single form.  Template parsing needs no backtracking; nested input is
// walked via nesting-stack, with #f delimiters skipped by re-invoking the
// lexer.  Returns (failure, f).
// NOTE(review): this definition is truncated at the end of the visible
// chunk (mid-expression); the remainder lies outside this view.
define method parse-template-fragments-as
    (constraint-start, f* :: )
 => (failure, f)
  let input-cursor = pair(constraint-start, f*);
  let nesting-stack = #();
  local method lexer () => (f :: )
    if (empty?(input-cursor))
      if (empty?(nesting-stack))
        $end-constraint
      else
        // Pop back out of a nested fragment; a #f close delimiter means
        // there is nothing to emit, so recurse for the next token.
        let close/tokens = nesting-stack.head;
        nesting-stack := nesting-stack.tail;
        input-cursor := close/tokens.tail;
        let close = close/tokens.head;
        if (close) close else lexer() end;
      end;
    else
      let next-input = input-cursor.head;
      input-cursor := input-cursor.tail;
      if (instance?(next-input,