;;;; src/lexer-test.lisp

(in-package :monkey)

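;; *TESTS* maps each registered test name to T; RUN-TESTS walks this
;; table.  *TEST-NAME* is bound to the name of the test currently
;; running so TEST-LEXER can report it.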
(defparameter *tests* (make-hash-table))
(defparameter *test-name* nil)

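;; The helper functions below are wrapped in EVAL-ALWAYS (presumably a
;; shorthand for EVAL-WHEN defined elsewhere in the project) so that
;; the DEFTEST macro can call them at macroexpansion time.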
(eval-always
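  ;; Builds a list of tokens from designators that are either
  ;; (TYPE LITERAL) pairs or bare token-type keywords.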
  (defun make-tokens (&rest tokens)
    (loop :for token :in tokens
          :if (consp token)
            :collect (make-token (car token) (cadr token))
          :if (typep token 'token-type)
            :collect (as-token token)))

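  ;; Lexes STR and compares the resulting tokens, in order, against
  ;; EXPECTED-TOKENS, printing a line for every mismatch.  On a
  ;; mismatch the expected token is not advanced, so a single stray
  ;; token can cascade into several reports.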
  (defun test-lexer (str expected-tokens)
    (labels ((expected-next ()
               (prog1 (car expected-tokens)
                 (setf expected-tokens (cdr expected-tokens)))))
      (format t "Testing ~s..." *test-name*)
      (let* ((lexer (make-lexer str))
             (tokens (lexer-tokens lexer))
             (expected-token (expected-next))
             (ok t))
        (loop :for token :in tokens
              :for i :from 1
              :if (not (token= token expected-token))
                :do (format t "~&token ~d: expected ~a but got ~a~%" i expected-token token)
                    (setf ok nil)
              :else :do (setf expected-token (expected-next)))
        (when ok
          (format t " ok~%")))))

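  ;; Converts a compact designator into a token: a token-type keyword,
  ;; a string (identifier), an integer (integer literal), one of a few
  ;; punctuation characters, or a (TYPE LITERAL) list.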
  (defun tokenize (designator)
    (cond ((typep designator 'token-type)
           (as-token designator))
          ((stringp designator)
           (make-token :t/ident designator))
          ((integerp designator)
           (make-token :t/int (princ-to-string designator)))
          ((characterp designator)
           (case designator
             (#\( (as-token :t/lparen))
             (#\) (as-token :t/rparen))
             (#\; (as-token :t/semicolon))
             (#\{ (as-token :t/lbrace))
             (#\} (as-token :t/rbrace))))
          ((consp designator)
           (apply #'make-token designator))))

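  ;; Returns the expansion for a lexer test: register NAME in *TESTS*
  ;; and define a function of that name that runs TEST-LEXER on STRING
  ;; against the tokenized expected TOKENS.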
  (defun define-lexer-test (name string tokens)
    `(progn
       (setf (gethash ',name *tests*) t)
       (defun ,name ()
         (let ((*test-name* ',name))
           (test-lexer ,string (list ,@(mapcar (lambda (tok) `(tokenize ',tok))
                                               tokens)))))))

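  ;; (DEFTEST (:LEXER NAME) STRING TOKENS) defines a named lexer test;
  ;; when NAME is omitted a gensym is used instead.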
  (defmacro deftest ((type &optional name) &body args)
    (case type
      (:lexer (apply #'define-lexer-test
                     (or name (gensym "LEXER-TEST"))
                     args)))))

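;; Runs every test function registered in *TESTS*.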
(defun run-tests ()
  (let ((ok t) key val)
    (with-hash-table-iterator (it *tests*)
      (loop :do (multiple-value-setq (ok key val) (it))
            :while ok
            :do (funcall (symbol-function key))))))

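;; Lexer test cases.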
(deftest (:lexer test-1)
  "=+(){},;"
  (:t/= :t/+ :t/lparen :t/rparen
   :t/lbrace :t/rbrace :t/comma :t/semicolon :t/eof))

(deftest (:lexer test-2)
  "let five = 5;
let ten = 10;

let add = fn(x, y) {
  x + y;
};

let result = add(five, ten);
!-/*;
5 < 10 > 5;

if (5 < 10) {
    return true;
} else {
    return false;
}
10 == 10;
10 != 9;
"
  (:t/let "five" :t/= 5 #\;
          :t/let "ten" :t/= 10 #\;
          :t/let "add" :t/= :t/function #\( "x" :t/comma "y" #\) #\{
          "x" :t/+ "y" #\;
          #\} #\;
          :t/let "result" :t/= "add" #\( "five" :t/comma "ten" #\) #\;
          :t/! :t/- :t// :t/* #\;
          5 :t/< 10 :t/> 5 #\;
          :t/if #\( 5 :t/< 10 #\) #\{
          :t/return :t/true #\;
          #\} :t/else #\{
          :t/return :t/false #\;
          #\}
          10 :t/== 10 #\;
          10 :t/!= 9 #\;
          :t/eof))


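;; The two tests below are intentionally mismatched: the expected token
;; lists do not correspond to the inputs, presumably to exercise the
;; mismatch reporting in TEST-LEXER.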
(deftest (:lexer test-fail)
  "abc gf 5 fn =+(){},;"
  (:t/= :t/+ :t/lparen :t/rparen
   :t/lbrace :t/rbrace :t/comma :t/semicolon :t/eof))

(deftest (:lexer test-fail-2)
  "let abc x + 5;"
  (:t/let "abc" :t/= "x" :t/+ 5 :t/eof))