contextjson: Add support for trailing comma
nekohasekai committed May 29, 2024
1 parent ae816ea commit fca4b8e
Showing 2 changed files with 46 additions and 5 deletions.
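The net effect of the two files below: the vendored contextjson decoder (a fork of encoding/json) now tolerates a single trailing comma before a closing ']' or '}'. A minimal sketch of that behavior, assuming the fork keeps encoding/json's Unmarshal surface; the import path is only a hypothetical stand-in, since the package actually lives under common/json/internal/contextjson and is reached through the repository's own json wrapper:

package main

import (
    "fmt"

    // Hypothetical alias: stands in for however the contextjson fork is
    // actually consumed; it is an internal package of the repository.
    json "github.com/sagernet/sing-box/common/json"
)

func main() {
    var v map[string]any
    // The trailing comma after the last member no longer causes a syntax error.
    if err := json.Unmarshal([]byte(`{"name": "sekai", "port": 443,}`), &v); err != nil {
        fmt.Println("unexpected error:", err)
        return
    }
    fmt.Println(v) // map[name:sekai port:443]
}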
common/json/internal/contextjson/scanner.go (4 changes: 2 additions & 2 deletions)
@@ -300,7 +300,7 @@ func stateEndValue(s *scanner, c byte) int {
 	case parseObjectValue:
 		if c == ',' {
 			s.parseState[n-1] = parseObjectKey
-			s.step = stateBeginString
+			s.step = stateBeginStringOrEmpty
 			return scanObjectValue
 		}
 		if c == '}' {
@@ -310,7 +310,7 @@ func stateEndValue(s *scanner, c byte) int {
 		return s.error(c, "after object key:value pair")
 	case parseArrayValue:
 		if c == ',' {
-			s.step = stateBeginValue
+			s.step = stateBeginValueOrEmpty
 			return scanArrayValue
 		}
 		if c == ']' {
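Routing the scanner to stateBeginStringOrEmpty / stateBeginValueOrEmpty after a comma reuses the states that (as in upstream encoding/json) already accept an immediate '}' or ']' right after '{' or '['. So only one trailing comma becomes legal; leading commas and empty elements are still rejected. A hedged, table-driven sketch of that boundary, under the same import assumption as above:

package main

import (
    "fmt"

    // Hypothetical import path; see the note in the first example.
    json "github.com/sagernet/sing-box/common/json"
)

func main() {
    cases := []struct {
        input string
        want  bool // accepted after this change?
    }{
        {`[1, 2,]`, true},   // trailing comma in an array
        {`{"a": 1,}`, true}, // trailing comma in an object
        {`[,1]`, false},     // leading comma: still a syntax error
        {`[1,,2]`, false},   // empty element: still a syntax error
        {`{,}`, false},      // comma without a key: still a syntax error
    }
    for _, c := range cases {
        var v any
        err := json.Unmarshal([]byte(c.input), &v)
        fmt.Printf("%-12s accepted=%v want=%v\n", c.input, err == nil, c.want)
    }
}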
common/json/internal/contextjson/stream.go (47 changes: 44 additions & 3 deletions)
@@ -153,6 +153,10 @@ func (dec *Decoder) refill() error {
 		dec.scanp = 0
 	}
 
+	return dec.refill0()
+}
+
+func (dec *Decoder) refill0() error {
 	// Grow buffer if not large enough.
 	const minRead = 512
 	if cap(dec.buf)-len(dec.buf) < minRead {
@@ -402,15 +406,14 @@ func (dec *Decoder) Token() (Token, error) {
 			return Delim('{'), nil
 
 		case '}':
-			if dec.tokenState != tokenObjectStart && dec.tokenState != tokenObjectComma {
+			if dec.tokenState != tokenObjectStart && dec.tokenState != tokenObjectComma && dec.tokenState != tokenObjectKey {
 				return dec.tokenError(c)
 			}
 			dec.scanp++
 			dec.tokenState = dec.tokenStack[len(dec.tokenStack)-1]
 			dec.tokenStack = dec.tokenStack[:len(dec.tokenStack)-1]
 			dec.tokenValueEnd()
 			return Delim('}'), nil
-
 		case ':':
 			if dec.tokenState != tokenObjectColon {
 				return dec.tokenError(c)
@@ -483,7 +486,26 @@ func (dec *Decoder) tokenError(c byte) (Token, error) {
 // current array or object being parsed.
 func (dec *Decoder) More() bool {
 	c, err := dec.peek()
-	return err == nil && c != ']' && c != '}'
+	// return err == nil && c != ']' && c != '}'
+	if err != nil {
+		return false
+	}
+	if c == ']' || c == '}' {
+		return false
+	}
+	if c == ',' {
+		scanp := dec.scanp
+		dec.scanp++
+		c, err = dec.peekNoRefill()
+		dec.scanp = scanp
+		if err != nil {
+			return false
+		}
+		if c == ']' || c == '}' {
+			return false
+		}
+	}
+	return true
 }
 
 func (dec *Decoder) peek() (byte, error) {
@@ -505,6 +527,25 @@ func (dec *Decoder) peek() (byte, error) {
 	}
 }
 
+func (dec *Decoder) peekNoRefill() (byte, error) {
+	var err error
+	for {
+		for i := dec.scanp; i < len(dec.buf); i++ {
+			c := dec.buf[i]
+			if isSpace(c) {
+				continue
+			}
+			dec.scanp = i
+			return c, nil
+		}
+		// buffer has been scanned, now report any error
+		if err != nil {
+			return 0, err
+		}
+		err = dec.refill0()
+	}
+}
+
 // InputOffset returns the input stream byte offset of the current decoder position.
 // The offset gives the location of the end of the most recently returned token
 // and the beginning of the next token.
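With the More() lookahead above, a comma followed only by whitespace and a closing ']' or '}' no longer counts as a further element: More() saves dec.scanp, steps past the comma, peeks with peekNoRefill (which grows the buffer through refill0 and therefore never rewinds it under the saved offset), and then restores dec.scanp. Together with the extra tokenObjectKey case in Token(), that lets a streaming caller finish an object that ends in a trailing comma. A sketch, again assuming the fork mirrors encoding/json's Decoder API behind a hypothetical import path:

package main

import (
    "fmt"
    "strings"

    // Hypothetical import path; see the note in the first example.
    json "github.com/sagernet/sing-box/common/json"
)

func main() {
    dec := json.NewDecoder(strings.NewReader(`{"a": 1, "b": 2, }`))

    if _, err := dec.Token(); err != nil { // consume the opening '{'
        fmt.Println(err)
        return
    }
    // More() reports false once only ", }" remains, so the loop ends
    // cleanly instead of promising a key that is not there.
    for dec.More() {
        key, err := dec.Token()
        if err != nil {
            fmt.Println(err)
            return
        }
        var value int
        if err := dec.Decode(&value); err != nil {
            fmt.Println(err)
            return
        }
        fmt.Println(key, value)
    }
    if _, err := dec.Token(); err != nil { // consume the closing '}'
        fmt.Println(err)
        return
    }
}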
