paragraph_test.go

package parser

import (
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
func TestParagraphParser(t *testing.T) {
	tests := []struct {
		text      string
		paragraph *ParagraphParser
	}{
		{
			text:      "",
			paragraph: nil,
		},
		{
			text: "Hello world",
			paragraph: &ParagraphParser{
				ContentTokens: []*tokenizer.Token{
					{
						Type:  tokenizer.Text,
						Value: "Hello",
					},
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
					{
						Type:  tokenizer.Text,
						Value: "world",
					},
				},
			},
		},
		{
			// The trailing space after "Hello" is intentional: it produces the Space token expected below.
			text: `Hello 
world`,
			paragraph: &ParagraphParser{
				ContentTokens: []*tokenizer.Token{
					{
						Type:  tokenizer.Text,
						Value: "Hello",
					},
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
				},
			},
		},
		{
			// The trailing space after the literal `\n` is intentional: it produces the final Space token expected below.
			text: `Hello \n 
world`,
			paragraph: &ParagraphParser{
				ContentTokens: []*tokenizer.Token{
					{
						Type:  tokenizer.Text,
						Value: "Hello",
					},
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
					{
						Type:  tokenizer.Text,
						Value: `\n`,
					},
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
				},
			},
		},
	}
	for _, test := range tests {
		tokens := tokenizer.Tokenize(test.text)
		paragraph := NewParagraphParser()
		require.Equal(t, test.paragraph, paragraph.Match(tokens))
	}
}
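
The cases above imply the contract of ParagraphParser.Match: it collects content tokens up to the first line break and returns nil when there is nothing to match. The following is a minimal sketch of that behavior inferred from the test data, not the actual implementation in paragraph.go; it assumes the tokenizer exposes a Newline token type for line breaks, an identifier that does not appear in the test cases above.

// Sketch only: behavior inferred from the test cases, not the real paragraph.go.
func (p *ParagraphParser) Match(tokens []*tokenizer.Token) *ParagraphParser {
	contentTokens := []*tokenizer.Token{}
	for _, token := range tokens {
		// Assumption: the tokenizer emits a Newline token for "\n"; a paragraph ends there.
		if token.Type == tokenizer.Newline {
			break
		}
		contentTokens = append(contentTokens, token)
	}
	if len(contentTokens) == 0 {
		// Empty input matches no paragraph (first test case).
		return nil
	}
	return &ParagraphParser{ContentTokens: contentTokens}
}

To run only this test from the repository root: go test -run TestParagraphParser ./plugin/gomark/parser/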