bold_test.go

package parser

import (
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)

func TestBoldParser(t *testing.T) {
	tests := []struct {
		text string
		bold *BoldParser
	}{
		{
			text: "*Hello world!",
			bold: nil,
		},
		{
			text: "**Hello**",
			bold: &BoldParser{
				ContentTokens: []*tokenizer.Token{
					{
						Type:  tokenizer.Text,
						Value: "Hello",
					},
				},
			},
		},
		{
			text: "** Hello **",
			bold: &BoldParser{
				ContentTokens: []*tokenizer.Token{
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
					{
						Type:  tokenizer.Text,
						Value: "Hello",
					},
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
				},
			},
		},
		{
			text: "** Hello * *",
			bold: nil,
		},
		{
			text: "* * Hello **",
			bold: nil,
		},
		{
			text: `** Hello
**`,
			bold: nil,
		},
		{
			text: `**Hello \n**`,
			bold: &BoldParser{
				ContentTokens: []*tokenizer.Token{
					{
						Type:  tokenizer.Text,
						Value: "Hello",
					},
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
					{
						Type:  tokenizer.Text,
						Value: `\n`,
					},
				},
			},
		},
	}

	for _, test := range tests {
		tokens := tokenizer.Tokenize(test.text)
		bold := NewBoldParser()
		require.Equal(t, test.bold, bold.Match(tokens))
	}
}
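
// The cases above pin down Match's contract: a bold span opens and closes with
// two consecutive asterisks, carries at least one content token, and may not
// cross a raw newline. The sketch below is only an illustration consistent
// with this table, not the implementation in bold.go; matchBold is a
// hypothetical helper, and tokenizer.Asterisk and tokenizer.Newline are
// assumed token types (only tokenizer.Text and tokenizer.Space appear in the
// test itself).
func matchBold(tokens []*tokenizer.Token) *BoldParser {
	// "**" + at least one content token + "**" means five tokens minimum.
	if len(tokens) < 5 {
		return nil
	}
	// The span must open with two consecutive asterisks ("*Hello world!" and
	// "* * Hello **" fail here).
	if tokens[0].Type != tokenizer.Asterisk || tokens[1].Type != tokenizer.Asterisk {
		return nil
	}
	contentTokens := []*tokenizer.Token{}
	for i := 2; i < len(tokens)-1; i++ {
		// A raw newline inside the span rejects the match.
		if tokens[i].Type == tokenizer.Newline {
			return nil
		}
		// Two consecutive asterisks close the span.
		if tokens[i].Type == tokenizer.Asterisk && tokens[i+1].Type == tokenizer.Asterisk {
			return &BoldParser{ContentTokens: contentTokens}
		}
		contentTokens = append(contentTokens, tokens[i])
	}
	// No closing "**" was found ("** Hello * *").
	return nil
}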