style(3p/lisp): expand tabs in npg, mime4cl and sclf
Done using

  find third_party/lisp/{sclf,mime4cl,npg} \
    -name '*.lisp' -or -name '*.asd' \
    -exec bash -c 'expand -i -t 8 "$0" | sponge "$0"' {} \;

Change-Id: If84afac9c1d5cbc74e137a5aa0ae61472f0f1e90
Reviewed-on: https://cl.tvl.fyi/c/depot/+/5066
Tested-by: BuildkiteCI
Reviewed-by: sterni <sternenseemann@systemli.org>
parent 40014c70b3
commit 25cb0ad32f

25 changed files with 2467 additions and 2467 deletions
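Since the change is purely a tab-to-space expansion, one way to double-check that nothing semantic changed is to diff the parent and the new commit with whitespace ignored; this is a minimal sketch using the hashes above, assuming a depot checkout, and is not part of the commit itself:

  # empty output means the two trees differ only in whitespace
  git diff -w 40014c70b3 25cb0ad32f -- \
      third_party/lisp/sclf third_party/lisp/mime4cl third_party/lisp/npg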
third_party/lisp/npg/src/define.lisp (vendored, 366 lines changed)
@@ -37,13 +37,13 @@ those that are not declared as strings in the grammar.")
the list of variables for the function reducing this production, those
that are non static and their unambiguous user-friendly names."
  (flet ((unique (sym list)
           (if (not (assoc sym list))
               sym
               (loop
                  for i of-type fixnum from 2
                  for x = (intern (format nil "~:@(~A~)~A" sym i))
                  while (assoc x list)
                  finally (return x)))))
    (loop
       for tok in tokens
       for i of-type fixnum from 1
@@ -54,8 +54,8 @@ that are non static and their unambiguous user-friendly names."
       and when (symbolp tok)
       collect (list (unique tok named-vars) arg) into named-vars
       when (and (listp tok)
                 (symbolp (cadr tok)))
       collect (list (unique (cadr tok) named-vars) arg) into named-vars
       finally
         (return (values args vars named-vars)))))

@@ -63,56 +63,56 @@ that are non static and their unambiguous user-friendly names."
  "Create a function with name NAME, arguments derived from TOKENS and
body ACTION. Return it's definition."
  (let ((function
         (multiple-value-bind (args vars named-vars)
             (make-action-arguments tokens)
           `(lambda ,args
              (declare (ignorable ,@args))
              (let (($vars (list ,@vars))
                    ($all (list ,@args))
                    ,@named-vars
                    ($alist (list ,@(mapcar #'(lambda (v)
                                                `(cons ',(intern (symbol-name (car v)))
                                                       ,(cadr v)))
                                            named-vars))))
                (declare (ignorable $vars $all $alist ,@(mapcar #'car named-vars)))
                (flet ((make-object (&optional type args)
                         (apply #'make-instance (or type ',name)
                                (append args $alist))))
                  ,action))))))
    (when *compile-print*
      (if *compile-verbose*
          (format t "; Compiling ~S:~% ~S~%" name function)
          (format t "; Compiling ~S~%" name)))
    (compile name function)))

(defun define-rule (name productions)
  "Accept a rule in EBNF-like syntax, translate it into a sexp and a
call to INSERT-RULE-IN-CURRENT-GRAMMAR."
  (flet ((transform (productions)
           (loop
              for tok in productions
              with prod = nil
              with action = nil
              with phase = nil
              with new-prods = nil
              while tok
              do (cond ((eq tok :=)
                        (push (list (nreverse prod) action) new-prods)
                        (setf prod nil
                              action nil
                              phase :prod))
                       ((eq tok :reduce)
                        (setf phase :action))
                       ((eq tok :tag)
                        (setf phase :tag))
                       ((eq phase :tag)
                        (setf action `(cons ,tok $vars)))
                       ((eq phase :action)
                        (setf action tok))
                       ((eq phase :prod)
                        (push tok prod)))
              finally
                (return (cdr (nreverse (cons (list (nreverse prod) action) new-prods)))))))
    (insert-rule-in-current-grammar name (transform productions))))

(defmacro defrule (name &rest productions)

@@ -124,9 +124,9 @@ call to INSERT-RULE-IN-CURRENT-GRAMMAR."
return it."
  (insert-rule-in-current-grammar
   (gensym (concatenate 'string "OPT-"
                        (if (rule-p token)
                            (symbol-name (rule-name token))
                            (string-upcase token))))
   `(((,token)) (()))))

(defun make-alternative-rule (tokens)

@@ -134,24 +134,24 @@ return it."
  (insert-rule-in-current-grammar
   (gensym "ALT")
   (mapcar #'(lambda (alternative)
               `((,alternative)))
           tokens)))

(defun make-nonempty-list-rule (token &optional separator)
  "Make a rule for a non-empty list (+ syntax) and return it."
  (let ((rule-name (gensym (concatenate 'string "NELST-"
                                        (if (rule-p token)
                                            (symbol-name (rule-name token))
                                            (string-upcase token))))))
    (insert-rule-in-current-grammar
     rule-name
     (if separator
         `(((,token ,separator ,rule-name)
            (cons $1 $3))
           ((,token) ,#'list))
         `(((,token ,rule-name)
            (cons $1 $2))
           ((,token) ,#'list))))))

(defun make-list-rule (token &optional separator)
  "Make a rule for a possibly empty list (* syntax) return it."
@@ -166,14 +166,14 @@
or (* NAME) or (+ NAME). This is used by the DEFRULE macro."
  (if (symbolp tok)
      (let* ((name (symbol-name tok))
             (last (char name (1- (length name))))
             ;; this looks silly but we need to make sure that we
             ;; return symbols interned in this package, no one else
             (op (cadr (assoc last '((#\? ?) (#\+ +) (#\* *))))))
        (if (and (> (length name) 1) op)
            (list op
                  (intern (subseq name 0 (1- (length name)))))
            tok))
      tok))

(defun EBNF-to-SEBNF (tokens)

@@ -184,10 +184,10 @@ EBNF syntax into a sexp-based EBNF syntax or SEBNF."
     for token = (expand-production-token tok)
     with new-tokens = '()
     do (cond ((member token '(* + ?))
               (setf (car new-tokens)
                     (list token (car new-tokens))))
              (t
               (push token new-tokens)))
     finally (return (nreverse new-tokens))))

(defun SEBNF-to-BNF (tokens)

@@ -195,21 +195,21 @@
it into BNF. The production is simplified but the current grammar is
populated with additional rules."
  (flet ((make-complex-token-rule (tok)
           (ecase (car tok)
             (* (apply #'make-list-rule (cdr tok)))
             (+ (apply #'make-nonempty-list-rule (cdr tok)))
             (? (make-optional-rule (cadr tok)))
             (or (make-alternative-rule (cdr tok))))))
    (loop
       for token in tokens
       with new-tokens = '()
       with keywords = '()
       do (cond ((listp token)
                 (push (make-complex-token-rule token) new-tokens))
                (t
                 (push token new-tokens)
                 (when (const-terminal-p token)
                   (push token keywords))))
       finally (return (values (nreverse new-tokens) keywords)))))

(defun make-default-action-function (name tokens)

@@ -220,28 +220,28 @@ list and in case only a variable token is available that one is
returned (not included in a list). If all the tokens are
constant, then all of them are returned in a list."
  (cond ((null tokens)
         ;; if the production matched the empty list (no tokens) we
         ;; return always nil, that is the function LIST applied to no
         ;; arguments
         #'list)
        ((null (cdr tokens))
         ;; if the production matches just one token we simply return
         ;; that
         #'identity)
        (*smart-default-reduction*
         ;; If we are required to be "smart" then create a function
         ;; that simply returns the non static tokens of the
         ;; production. If the production doesn't have nonterminal,
         ;; then return all the tokens. If the production has only
         ;; one argument then return that one only.
         (make-action-function name tokens '(cond
                                              ((null $vars) $all)
                                              ((null (cdr $vars)) (car $vars))
                                              (t $vars))))
        (t
         ;; in all the other cases we return all the token matching
         ;; the production
         #'list)))

(defun make-production-from-descr (name production-description)
  "Take a production NAME and its description in the form of a sexp

@@ -250,28 +250,28 @@ keywords."
  (destructuring-bind (tokens &optional action) production-description
    (let ((expanded-tokens (EBNF-to-SEBNF tokens)))
      (multiple-value-bind (production-tokens keywords)
          (sebnf-to-bnf expanded-tokens)
        (let ((funct
               (cond ((not action)
                      (make-default-action-function name expanded-tokens))
                     ((or (listp action)
                          ;; the case when the action is simply to
                          ;; return a token (ie $2) or a constant value
                          (symbolp action))
                      (make-action-function name expanded-tokens action))
                     ((functionp action)
                      action)
                     (t ; action is a constant
                      #'(lambda (&rest args)
                          (declare (ignore args))
                          action)))))
          (values
           ;; Make a promise instead of actually resolving the
           ;; nonterminals. This avoids endless recursion.
           (make-production :tokens production-tokens
                            :tokens-length (length production-tokens)
                            :action funct)
           keywords))))))

(defun remove-immediate-left-recursivity (rule)
  "Turn left recursive rules of the type

@@ -281,7 +281,7 @@ into
A2 -> x A2 | E
where E is the empty production."
  (let ((name (rule-name rule))
        (productions (rule-productions rule)))
    (loop
       for prod in productions
       for tokens = (prod-tokens prod)
@@ -291,40 +291,40 @@ where E is the empty production."
       else
       collect prod into non-left-recursive
       finally
         ;; found any left recursive production?
         (when left-recursive
           (warn "rule ~S is left recursive" name)
           (let ((new-rule (make-rule :name (gensym "REWRITE"))))
             ;; A -> y A2
             (setf (rule-productions rule)
                   (mapcar #'(lambda (p)
                               (let ((tokens (prod-tokens p))
                                     (action (prod-action p)))
                                 (make-production :tokens (append tokens (list new-rule))
                                                  :tokens-length (1+ (prod-tokens-length p))
                                                  :action #'(lambda (&rest args)
                                                              (let ((f-A2 (car (last args)))
                                                                    (head (butlast args)))
                                                                (funcall f-A2 (apply action head)))))))
                           non-left-recursive))
             ;; A2 -> x A2 | E
             (setf (rule-productions new-rule)
                   (append
                    (mapcar #'(lambda (p)
                                (let ((tokens (prod-tokens p))
                                      (action (prod-action p)))
                                  (make-production :tokens (append (cdr tokens) (list new-rule))
                                                   :tokens-length (prod-tokens-length p)
                                                   :action #'(lambda (&rest args)
                                                               (let ((f-A2 (car (last args)))
                                                                     (head (butlast args)))
                                                                 #'(lambda (x)
                                                                     (funcall f-A2 (apply action x head))))))))
                            left-recursive)
                    (list
                     (make-production :tokens nil
                                      :tokens-length 0
                                      :action #'(lambda () #'(lambda (arg) arg)))))))))))

(defun remove-left-recursivity-from-rules (rules)
  (loop

@@ -338,9 +338,9 @@ where E is the empty production."
  (loop
     for rule being each hash-value in rules
     do (loop
           for production in (rule-productions rule)
           do (setf (prod-tokens production)
                    (resolve-nonterminals (prod-tokens production) rules)))))

(defun make-rule-productions (rule-name production-descriptions)
  "Return a production object that belongs to RULE-NAME made according

@@ -352,12 +352,12 @@ to PRODUCTION-DESCRIPTIONS. See also MAKE-PRODUCTION-FROM-DESCR."
     with productions = '()
     with keywords = '()
     do (progn
          (multiple-value-bind (production keyws)
              (make-production-from-descr prod-name descr)
            (push production productions)
            (setf keywords (append keyws keywords))))
     finally (return
               (values (nreverse productions) keywords))))

(defun create-rule (name production-descriptions)
  "Return a new rule object together with a list of keywords making up

@@ -365,7 +365,7 @@ the production definitions."
  (multiple-value-bind (productions keywords)
      (make-rule-productions name production-descriptions)
    (values (make-rule :name name :productions productions)
            keywords)))

(defun insert-rule-in-current-grammar (name productions)
  "Add rule to the current grammar and its keywords to the keywords

@@ -384,18 +384,18 @@ instead."
  "Given a list of production tokens, try to expand the nonterminal
ones with their respective rule from the the RULES pool."
  (flet ((resolve-symbol (sym)
           (or (find-rule sym rules)
               sym)))
    (mapcar #'(lambda (tok)
                (if (symbolp tok)
                    (resolve-symbol tok)
                    tok))
            tokens)))

(defun reset-grammar ()
  "Empty the current grammar from any existing rule."
  (setf *rules* (make-rules-table)
        *keywords* (make-keywords-table)))

(defun generate-grammar (&optional (equal-p #'string-equal))
  "Return a GRAMMAR structure suitable for the PARSE function, using

@@ -404,5 +404,5 @@ match the input tokens; it defaults to STRING-EQUAL."
  (resolve-all-nonterminals *rules*)
  (remove-left-recursivity-from-rules *rules*)
  (make-grammar :rules *rules*
                :keywords *keywords*
                :equal-p equal-p))

third_party/lisp/npg/src/parser.lisp (vendored, 238 lines changed)
@@ -43,9 +43,9 @@ Tune this if your grammar is unusually complex.")
  (when *debug*
    (format *debug* "reducing ~S on ~S~%" production arguments))
  (flet ((safe-token-value (token)
           (if (token-p token)
               (token-value token)
               token)))
    (apply (prod-action production) (mapcar #'safe-token-value arguments))))

(defgeneric later-position (pos1 pos2)

@@ -75,120 +75,120 @@ supposed to specialise this method."))
Return the reduced values according to the nonterminal actions. Raise
an error on failure."
  (declare (type grammar grammar)
           (type symbol start))
  (labels
      ((match-token (expected token)
         (when *debug*
           (format *debug* "match-token ~S ~S -> " expected token))
         (let ((res (cond ((symbolp expected)
                           ;; non-costant terminal (like identifiers)
                           (eq expected (token-type token)))
                          ((and (stringp expected)
                                (stringp (token-value token)))
                           ;; string costant terminal
                           (funcall (the function (grammar-equal-p grammar)) expected (token-value token)))
                          ((functionp expected)
                           ;; custom equality predicate (must be able
                           ;; to deal with token objects)
                           (funcall expected token))
                          ;; all the rest
                          (t (equal expected (token-value token))))))
           (when *debug*
             (format *debug* "~Amatched~%" (if res "" "not ")))
           res))
       (match (expected matched #+debug depth)
         (declare (list expected matched)
                  #+debug (fixnum depth))
         (let ((first-expected (car expected)))
           (cond #+debug ((> depth *maximum-recursion-depth*)
                          (error "endless recursion on ~A ~A at ~A expecting ~S"
                                 (token-type (car matched)) (token-value (car matched))
                                 (token-position (car matched)) expected))
                 ((eq first-expected :any)
                  (match (cdr expected) (cdr matched) #+debug depth))
                 ;; This is a trick to obtain partial parses. When we
                 ;; reach this expected token we assume we succeeded
                 ;; the parsing and return the remaining tokens as
                 ;; part of the match.
                 ((eq first-expected :rest)
                  ;; we could be at the end of input so we check this
                  (unless (cdr matched)
                    (setf (cdr matched) (list :rest)))
                  (list nil nil))
                 ((rule-p first-expected)
                  ;; If it's a rule, then we try to match all its
                  ;; productions. We return the first that succeeds.
                  (loop
                     for production in (rule-productions first-expected)
                     for production-tokens of-type list = (prod-tokens production)
                     with last-error-position = nil
                     with last-error = nil
                     for (error-position error-descr) =
                       (progn
                         (when *debug*
                           (format *debug* "trying to match ~A: ~S~%"
                                   (rule-name first-expected) production-tokens))
                         (match (append production-tokens (cdr expected)) matched #+debug (1+ depth)))
                     do (cond ((not error-position)
                               (return (let ((args-count (prod-tokens-length production)))
                                         (setf (cdr matched)
                                               (cons (reduce-production
                                                      production
                                                      (subseq (the list (cdr matched)) 0 args-count))
                                                     (nthcdr (1+ args-count) matched)))
                                         (list nil nil))))
                              ((or (not last-error)
                                   (later-position error-position last-error-position))
                               (setf last-error-position error-position
                                     last-error error-descr)))
                     ;; if everything fails return the "best" error
                     finally (return (list last-error-position
                                           (if *debug*
                                               #'(lambda ()
                                                   (format nil "~A, trying to match ~A"
                                                           (funcall (the function last-error))
                                                           (rule-name first-expected)))
                                               last-error)))))
                 (t
                  ;; if necessary load the next tokens
                  (when (null (cdr matched))
                    (setf (cdr matched) (read-next-tokens tokenizer)))
                  (cond ((and (or (null expected) (eq first-expected :eof))
                              (null (cdr matched)))
                         ;; This point is reached only once for each complete
                         ;; parsing. The expected tokens and the input
                         ;; tokens have been exhausted at the same time.
                         ;; Hence we succeeded the parsing.
                         (setf (cdr matched) (list :eof))
                         (list nil nil))
                        ((null expected)
                         ;; Garbage at end of parsing. This may mean that we
                         ;; have considered a production completed too soon.
                         (list (token-position (car matched))
                               #'(lambda ()
                                   "garbage at end of parsing")))
                        ((null (cdr matched))
                         ;; EOF error
                         (list :eof
                               #'(lambda ()
                                   (format nil "end of input expecting ~S" expected))))
                        (t ;; normal token
                         (let ((first-token (cadr matched)))
                           (if (match-token first-expected first-token)
                               (match (cdr expected) (cdr matched) #+debug depth)
                               ;; failed: we return the error
                               (list (token-position first-token)
                                     #'(lambda ()
                                         (format nil "expected ~S but got ~S ~S"
                                                 first-expected (token-type first-token)
                                                 (token-value first-token)))))))))))))
    (declare (inline match-token))
    (let ((result (list :head)))
      (destructuring-bind (error-position error)
          (match (list (find-rule start (grammar-rules grammar))) result #+debug 0)
        (when error-position
          (error "~A at ~A~%" (funcall (the function error)) error-position))
        (cadr result)))))

(defgeneric terminals-in-grammar (grammar-or-hashtable)
  (:documentation

@@ -199,11 +199,11 @@ an error on failure."
     for rule being each hash-value of grammar
     with terminals = '()
     do (loop
           for prod in (rule-productions rule)
           do (loop
                 for tok in (prod-tokens prod)
                 when (symbolp tok)
                 do (pushnew tok terminals)))
     finally (return terminals)))

(defmethod terminals-in-grammar ((grammar grammar))

@@ -211,9 +211,9 @@ an error on failure."

(defun print-grammar-figures (grammar &optional (stream *standard-output*))
  (format stream "rules: ~A~%constant terminals: ~A~%variable terminals: ~S~%"
          (hash-table-count (grammar-rules grammar))
          (hash-table-count (grammar-keywords grammar))
          (terminals-in-grammar (grammar-rules grammar))))


(defun grammar-keyword-p (keyword grammar)