# test-tokenizer.R
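# Unit tests for tokenize(). Assumes the usual testthat layout: the package
# and testthat itself are loaded by the test runner, so no library() calls
# are needed here.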

test_that("tokenizer tokenizes #1", {
  res <- tokenize("(+ 1 2232)")
  expect_equal(res, c("(", "+", "1", "2232", ")"))
})

test_that("tokenizer tokenizes #2", {
  res <- tokenize("(+ abc \"abc\")")
  expect_equal(res, c("(", "+", "abc", "\"abc\"", ")"))
})

test_that("tokenizer tokenizes #3", {
  res <- tokenize("(fun [a b c] (+ a b c))")
  expect_equal(res, c(
    "(", "fun", "[", "a", "b", "c", "]",
    "(", "+", "a", "b", "c", ")", ")"
  ))
})
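
# Lisp-style `;` comments are dropped entirely: nothing from the comment
# line should appear in the token output.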
test_that("tokenizer supports comments", {
  res <- tokenize("(defn [a b c]
; This is a comment
(+ a b c)
)")
  expect_equal(res, c(
    "(", "defn", "[", "a", "b", "c", "]",
    "(", "+", "a", "b", "c", ")", ")"
  ))
})

test_that("tokenizer supports multiple expressions", {
  res <- tokenize("
(+ 1 2)
(* 1 2)
")
  expect_equal(res, c("(", "+", "1", "2", ")", "(", "*", "1", "2", ")"))
})
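
# Backquote (`) and unquote (~) each tokenize as single characters, but
# unquote-splicing (~@) is expected to stay together as one token.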
test_that("syntax quote special case", {
  res <- tokenize("`~1")
  expect_equal(res, c("`", "~", "1"))
  res <- tokenize("`(~@[1 2])")
  expect_equal(res, c("`", "(", "~@", "[", "1", "2", "]", ")"))
})
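
# The discard form #_1 is expected to split into three separate tokens.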
test_that("hash token", {
  res <- tokenize("#_1")
  expect_equal(res, c("#", "_", "1"))
})
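
# Symbols that embed "[[" (e.g. r/`[[` and r/[[) must be kept whole rather
# than split into bracket tokens.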
test_that("[[ is tokenized correctly", {
  res <- tokenize("r/`[[`")
  expect_equal(res, "r/`[[`")
  res <- tokenize("r/[[")
  expect_equal(res, "r/[[")
  res <- tokenize("(r/[[)")
  expect_equal(res, c("(", "r/[[", ")"))
})