
Commit

fix: children length checks
boojack committed Feb 22, 2024
1 parent 06fd394 commit 75b2b15
Showing 8 changed files with 29 additions and 9 deletions.
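
All eight files apply the same fix: a parser's Match reports a non-match as (nil, 0), and a body that parses to zero children (or, in table.go, a cell segment too short to slice) is now treated as a non-match instead of being wrapped in an empty AST node. A minimal, self-contained sketch of that convention follows; Token, Node, and matchQuote are illustrative stand-ins, not this repository's tokenizer or ast types.

    package main

    import "fmt"

    // Stand-in types; the real parsers work on tokenizer.Token and ast nodes.
    type Token struct{ Value string }
    type Node struct{ Children []string }

    // matchQuote mimics the Match convention: consume a leading ">" and the
    // rest of the line, but with the new guard an empty body is a non-match.
    func matchQuote(tokens []*Token) (*Node, int) {
        if len(tokens) == 0 || tokens[0].Value != ">" {
            return nil, 0
        }
        children := []string{}
        size := 1
        for _, t := range tokens[1:] {
            size++
            if t.Value == "\n" {
                break
            }
            if t.Value != " " {
                children = append(children, t.Value)
            }
        }
        if len(children) == 0 { // the guard this commit adds
            return nil, 0
        }
        return &Node{Children: children}, size
    }

    func main() {
        empty := []*Token{{">"}, {" "}, {"\n"}}
        n, size := matchQuote(empty)
        fmt.Println(n, size) // <nil> 0 — no empty blockquote node

        full := []*Token{{">"}, {" "}, {"hi"}, {"\n"}}
        n, size = matchQuote(full)
        fmt.Println(n.Children, size) // [hi] 4
    }
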
4 changes: 3 additions & 1 deletion parser/blockquote.go
@@ -42,7 +42,9 @@ func (*BlockquoteParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
             size++ // NewLine.
         }
     }
-
+    if len(children) == 0 {
+        return nil, 0
+    }
     return &ast.Blockquote{
         Children: children,
     }, size

2 changes: 1 addition & 1 deletion parser/bold.go
@@ -44,7 +44,7 @@ func (*BoldParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 
     size := len(matchedTokens)
     children, err := ParseInlineWithParsers(matchedTokens[2:size-2], []InlineParser{NewLinkParser(), NewTextParser()})
-    if err != nil {
+    if err != nil || len(children) == 0 {
         return nil, 0
     }
     return &ast.Bold{

7 changes: 6 additions & 1 deletion parser/bold_italic.go
@@ -42,8 +42,13 @@ func (*BoldItalicParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
     }
 
     size := len(matchedTokens)
+    contentTokens := matchedTokens[3 : size-3]
+    if len(contentTokens) == 0 {
+        return nil, 0
+    }
+
     return &ast.BoldItalic{
         Symbol:  prefixTokenType,
-        Content: tokenizer.Stringify(matchedTokens[3 : size-3]),
+        Content: tokenizer.Stringify(contentTokens),
     }, size
 }
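
In bold.go and bold_italic.go above, the delimiters are two and three tokens on each side, so degenerate inputs such as "****" or "******" leave an empty content slice between them; the new checks turn those into non-matches. A quick stand-alone illustration, with plain strings standing in for tokenizer tokens:

    package main

    import "fmt"

    func main() {
        // "****" matches as four delimiter tokens; stripping two from each
        // side leaves nothing for ParseInlineWithParsers to produce.
        matched := []string{"*", "*", "*", "*"}
        size := len(matched)
        inner := matched[2 : size-2] // matched[2:2] is empty
        fmt.Println(len(inner))      // 0 -> Match now returns (nil, 0)
    }
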
4 changes: 4 additions & 0 deletions parser/bold_test.go
@@ -19,6 +19,10 @@ func TestBoldParser(t *testing.T) {
             text: "*Hello world!",
             bold: nil,
         },
+        {
+            text: "****",
+            bold: nil,
+        },
         {
             text: "**Hello**",
             bold: &ast.Bold{
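
The new table entry pins the "****" case down as a regression test. Assuming the module layout implied by the file paths above, go test ./parser -run TestBoldParser exercises just this parser's cases.
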
1 change: 0 additions & 1 deletion parser/heading.go
@@ -36,7 +36,6 @@ func (*HeadingParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
     if err != nil {
         return nil, 0
     }
-
     return &ast.Heading{
         Level:    level,
         Children: children,

1 change: 0 additions & 1 deletion parser/ordered_list.go
@@ -34,7 +34,6 @@ func (*OrderedListParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
     if len(contentTokens) == 0 {
         return nil, 0
     }
-
     children, err := ParseInline(contentTokens)
     if err != nil {
         return nil, 0

18 changes: 15 additions & 3 deletions parser/table.go
@@ -94,15 +94,27 @@ func (*TableParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 
     cols := len(tokenizer.Split(headerTokens, tokenizer.Pipe)) - 2
     for _, t := range tokenizer.Split(headerTokens, tokenizer.Pipe)[1 : cols+1] {
-        header = append(header, tokenizer.Stringify(t[1:len(t)-1]))
+        if len(t) < 3 {
+            header = append(header, "")
+        } else {
+            header = append(header, tokenizer.Stringify(t[1:len(t)-1]))
+        }
     }
     for _, t := range tokenizer.Split(delimiterTokens, tokenizer.Pipe)[1 : cols+1] {
-        delimiter = append(delimiter, tokenizer.Stringify(t[1:len(t)-1]))
+        if len(t) < 3 {
+            delimiter = append(delimiter, "")
+        } else {
+            delimiter = append(delimiter, tokenizer.Stringify(t[1:len(t)-1]))
+        }
     }
     for _, row := range rows {
         cells := make([]string, 0)
         for _, t := range tokenizer.Split(row, tokenizer.Pipe)[1 : cols+1] {
-            cells = append(cells, tokenizer.Stringify(t[1:len(t)-1]))
+            if len(t) < 3 {
+                cells = append(cells, "")
+            } else {
+                cells = append(cells, tokenizer.Stringify(t[1:len(t)-1]))
+            }
         }
         rowsStr = append(rowsStr, cells)
     }
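
The table change guards the cell slicing rather than the parsed children: t[1:len(t)-1] assumes each segment returned by tokenizer.Split carries at least one token on either side of the cell content, which an empty cell such as "||" may not provide (the exact token layout of a split segment is an assumption here). With a single-element segment, t[1:0] would panic with a slice-bounds error; the len(t) < 3 check records an empty cell instead. A small stand-alone sketch of the guarded pattern, with plain strings in place of tokens:

    package main

    import (
        "fmt"
        "strings"
    )

    // stringifyCell mirrors the guarded append pattern from table.go:
    // segments shorter than three elements become empty cells instead of
    // slicing out of range.
    func stringifyCell(t []string) string {
        if len(t) < 3 {
            return ""
        }
        return strings.Join(t[1:len(t)-1], "")
    }

    func main() {
        fmt.Printf("%q\n", stringifyCell([]string{" ", "cell", " "})) // "cell"
        fmt.Printf("%q\n", stringifyCell([]string{" "}))              // "" (t[1:0] would have panicked)
    }
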
1 change: 0 additions & 1 deletion parser/task_list.go
@@ -43,7 +43,6 @@ func (*TaskListParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
     if len(contentTokens) == 0 {
         return nil, 0
     }
-
     children, err := ParseInline(contentTokens)
     if err != nil {
         return nil, 0
