Allow to render container stats and configs both in `json` and `yaml` (#444)
parent
fcc0bbe552
commit
2a9ed70896
@ -1,5 +0,0 @@
|
||||
language: go
|
||||
go:
|
||||
- 1.8.x
|
||||
- tip
|
||||
|
@ -1,27 +0,0 @@
|
||||
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
|
||||
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/mattn/go-colorable"
|
||||
packages = ["."]
|
||||
revision = "167de6bfdfba052fa6b2d3664c8f5272e23c9072"
|
||||
version = "v0.0.9"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/mattn/go-isatty"
|
||||
packages = ["."]
|
||||
revision = "0360b2af4f38e8d38c7fce2a9f4e702702d73a39"
|
||||
version = "v0.0.3"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "golang.org/x/sys"
|
||||
packages = ["unix"]
|
||||
revision = "37707fdb30a5b38865cfb95e5aab41707daec7fd"
|
||||
|
||||
[solve-meta]
|
||||
analyzer-name = "dep"
|
||||
analyzer-version = 1
|
||||
inputs-digest = "e8a50671c3cb93ea935bf210b1cd20702876b9d9226129be581ef646d1565cdc"
|
||||
solver-name = "gps-cdcl"
|
||||
solver-version = 1
|
@ -1,30 +0,0 @@
|
||||
|
||||
# Gopkg.toml example
|
||||
#
|
||||
# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
|
||||
# for detailed Gopkg.toml documentation.
|
||||
#
|
||||
# required = ["github.com/user/thing/cmd/thing"]
|
||||
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
|
||||
#
|
||||
# [[constraint]]
|
||||
# name = "github.com/user/project"
|
||||
# version = "1.0.0"
|
||||
#
|
||||
# [[constraint]]
|
||||
# name = "github.com/user/project2"
|
||||
# branch = "dev"
|
||||
# source = "github.com/myfork/project2"
|
||||
#
|
||||
# [[override]]
|
||||
# name = "github.com/x/y"
|
||||
# version = "2.4.0"
|
||||
|
||||
|
||||
[[constraint]]
|
||||
name = "github.com/mattn/go-colorable"
|
||||
version = "0.0.9"
|
||||
|
||||
[[constraint]]
|
||||
name = "github.com/mattn/go-isatty"
|
||||
version = "0.0.3"
|
@ -0,0 +1,31 @@
|
||||
codecov:
|
||||
require_ci_to_pass: yes
|
||||
|
||||
coverage:
|
||||
precision: 2
|
||||
round: down
|
||||
range: "70...100"
|
||||
|
||||
status:
|
||||
project:
|
||||
default:
|
||||
target: 75%
|
||||
threshold: 2%
|
||||
patch: off
|
||||
changes: no
|
||||
|
||||
parsers:
|
||||
gcov:
|
||||
branch_detection:
|
||||
conditional: yes
|
||||
loop: yes
|
||||
method: no
|
||||
macro: no
|
||||
|
||||
comment:
|
||||
layout: "header,diff"
|
||||
behavior: default
|
||||
require_changes: no
|
||||
|
||||
ignore:
|
||||
- ast
|
@ -0,0 +1,163 @@
|
||||
# 1.10.1 - 2023-03-28
|
||||
|
||||
### Features
|
||||
|
||||
- Quote YAML 1.1 bools at encoding time for compatibility with other legacy parsers
|
||||
- Add support of 32-bit architecture
|
||||
|
||||
### Fix bugs
|
||||
|
||||
- Don't trim all space characters in block style sequence
|
||||
- Support strings starting with `@`
|
||||
|
||||
# 1.10.0 - 2023-03-01
|
||||
|
||||
### Fix bugs
|
||||
|
||||
Reversible conversion of comments was not working in various cases, which has been corrected.
|
||||
**Breaking Change** exists in the comment map interface. However, if you are dealing with CommentMap directly, there is no problem.
|
||||
|
||||
|
||||
# 1.9.8 - 2022-12-19
|
||||
|
||||
### Fix feature
|
||||
|
||||
- Append new line at the end of file ( #329 )
|
||||
|
||||
### Fix bugs
|
||||
|
||||
- Fix custom marshaler ( #333, #334 )
|
||||
- Fix behavior when struct fields conflicted( #335 )
|
||||
- Fix position calculation for literal, folded and raw folded strings ( #330 )
|
||||
|
||||
# 1.9.7 - 2022-12-03
|
||||
|
||||
### Fix bugs
|
||||
|
||||
- Fix handling of quoted map key ( #328 )
|
||||
- Fix resusing process of scanning context ( #322 )
|
||||
|
||||
## v1.9.6 - 2022-10-26
|
||||
|
||||
### New Features
|
||||
|
||||
- Introduce MapKeyNode interface to limit node types for map key ( #312 )
|
||||
|
||||
### Fix bugs
|
||||
|
||||
- Quote strings with special characters in flow mode ( #270 )
|
||||
- typeError implements PrettyPrinter interface ( #280 )
|
||||
- Fix incorrect const type ( #284 )
|
||||
- Fix large literals type inference on 32 bits ( #293 )
|
||||
- Fix UTF-8 characters ( #294 )
|
||||
- Fix decoding of unknown aliases ( #317 )
|
||||
- Fix stream encoder for insert a separator between each encoded document ( #318 )
|
||||
|
||||
### Update
|
||||
|
||||
- Update golang.org/x/sys ( #289 )
|
||||
- Update Go version in CI ( #295 )
|
||||
- Add test cases for missing keys to struct literals ( #300 )
|
||||
|
||||
## v1.9.5 - 2022-01-12
|
||||
|
||||
### New Features
|
||||
|
||||
* Add UseSingleQuote option ( #265 )
|
||||
|
||||
### Fix bugs
|
||||
|
||||
* Preserve defaults while decoding nested structs ( #260 )
|
||||
* Fix minor typo in decodeInit error ( #264 )
|
||||
* Handle empty sequence entries ( #275 )
|
||||
* Fix encoding of sequence with multiline string ( #276 )
|
||||
* Fix encoding of BytesMarshaler type ( #277 )
|
||||
* Fix indentState logic for multi-line value ( #278 )
|
||||
|
||||
## v1.9.4 - 2021-10-12
|
||||
|
||||
### Fix bugs
|
||||
|
||||
* Keep prev/next reference between tokens containing comments when filtering comment tokens ( #257 )
|
||||
* Supports escaping reserved keywords in PathBuilder ( #258 )
|
||||
|
||||
## v1.9.3 - 2021-09-07
|
||||
|
||||
### New Features
|
||||
|
||||
* Support encoding and decoding `time.Duration` fields ( #246 )
|
||||
* Allow reserved characters for key name in YAMLPath ( #251 )
|
||||
* Support getting YAMLPath from ast.Node ( #252 )
|
||||
* Support CommentToMap option ( #253 )
|
||||
|
||||
### Fix bugs
|
||||
|
||||
* Fix encoding nested sequences with `yaml.IndentSequence` ( #241 )
|
||||
* Fix error reporting on inline structs in strict mode ( #244, #245 )
|
||||
* Fix encoding of large floats ( #247 )
|
||||
|
||||
### Improve workflow
|
||||
|
||||
* Migrate CI from CircleCI to GitHub Action ( #249 )
|
||||
* Add workflow for ycat ( #250 )
|
||||
|
||||
## v1.9.2 - 2021-07-26
|
||||
|
||||
### Support WithComment option ( #238 )
|
||||
|
||||
`yaml.WithComment` is a option for encoding with comment.
|
||||
The position where you want to add a comment is represented by YAMLPath, and it is the key of `yaml.CommentMap`.
|
||||
Also, you can select `Head` comment or `Line` comment as the comment type.
|
||||
|
||||
## v1.9.1 - 2021-07-20
|
||||
|
||||
### Fix DecodeFromNode ( #237 )
|
||||
|
||||
- Fix YAML handling where anchor exists
|
||||
|
||||
## v1.9.0 - 2021-07-19
|
||||
|
||||
### New features
|
||||
|
||||
- Support encoding of comment node ( #233 )
|
||||
- Support `yaml.NodeToValue(ast.Node, interface{}, ...DecodeOption) error` ( #236 )
|
||||
- Can convert a AST node to a value directly
|
||||
|
||||
### Fix decoder for comment
|
||||
|
||||
- Fix parsing of literal with comment ( #234 )
|
||||
|
||||
### Rename API ( #235 )
|
||||
|
||||
- Rename `MarshalWithContext` to `MarshalContext`
|
||||
- Rename `UnmarshalWithContext` to `UnmarshalContext`
|
||||
|
||||
## v1.8.10 - 2021-07-02
|
||||
|
||||
### Fixed bugs
|
||||
|
||||
- Fix searching anchor by alias name ( #212 )
|
||||
- Fixing Issue 186, scanner should account for newline characters when processing multi-line text. Without this source annotations line/column number (for this and all subsequent tokens) is inconsistent with plain text editors. e.g. https://github.com/goccy/go-yaml/issues/186. This addresses the issue specifically for single and double quote text only. ( #210 )
|
||||
- Add error for unterminated flow mapping node ( #213 )
|
||||
- Handle missing required field validation ( #221 )
|
||||
- Nicely format unexpected node type errors ( #229 )
|
||||
- Support to encode map which has defined type key ( #231 )
|
||||
|
||||
### New features
|
||||
|
||||
- Support sequence indentation by EncodeOption ( #232 )
|
||||
|
||||
## v1.8.9 - 2021-03-01
|
||||
|
||||
### Fixed bugs
|
||||
|
||||
- Fix origin buffer for DocumentHeader and DocumentEnd and Directive
|
||||
- Fix origin buffer for anchor value
|
||||
- Fix syntax error about map value
|
||||
- Fix parsing MergeKey ('<<') characters
|
||||
- Fix encoding of float value
|
||||
- Fix incorrect column annotation when single or double quotes are used
|
||||
|
||||
### New features
|
||||
|
||||
- Support to encode/decode of ast.Node directly
|
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2019 Masaaki Goshima
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
@ -0,0 +1,19 @@
|
||||
.PHONY: test
|
||||
test:
|
||||
go test -v -race ./...
|
||||
|
||||
.PHONY: simple-test
|
||||
simple-test:
|
||||
go test -v ./...
|
||||
|
||||
.PHONY: cover
|
||||
cover:
|
||||
go test -coverprofile=cover.out ./...
|
||||
|
||||
.PHONY: cover-html
|
||||
cover-html: cover
|
||||
go tool cover -html=cover.out
|
||||
|
||||
.PHONY: ycat/build
|
||||
ycat/build:
|
||||
go build -o ycat ./cmd/ycat
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,871 @@
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/goccy/go-yaml/ast"
|
||||
"github.com/goccy/go-yaml/internal/errors"
|
||||
"github.com/goccy/go-yaml/parser"
|
||||
"github.com/goccy/go-yaml/printer"
|
||||
"github.com/goccy/go-yaml/token"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
const (
|
||||
// DefaultIndentSpaces default number of space for indent
|
||||
DefaultIndentSpaces = 2
|
||||
)
|
||||
|
||||
// Encoder writes YAML values to an output stream.
|
||||
type Encoder struct {
|
||||
writer io.Writer
|
||||
opts []EncodeOption
|
||||
indent int
|
||||
indentSequence bool
|
||||
singleQuote bool
|
||||
isFlowStyle bool
|
||||
isJSONStyle bool
|
||||
useJSONMarshaler bool
|
||||
anchorCallback func(*ast.AnchorNode, interface{}) error
|
||||
anchorPtrToNameMap map[uintptr]string
|
||||
customMarshalerMap map[reflect.Type]func(interface{}) ([]byte, error)
|
||||
useLiteralStyleIfMultiline bool
|
||||
commentMap map[*Path][]*Comment
|
||||
written bool
|
||||
|
||||
line int
|
||||
column int
|
||||
offset int
|
||||
indentNum int
|
||||
indentLevel int
|
||||
}
|
||||
|
||||
// NewEncoder returns a new encoder that writes to w.
|
||||
// The Encoder should be closed after use to flush all data to w.
|
||||
func NewEncoder(w io.Writer, opts ...EncodeOption) *Encoder {
|
||||
return &Encoder{
|
||||
writer: w,
|
||||
opts: opts,
|
||||
indent: DefaultIndentSpaces,
|
||||
anchorPtrToNameMap: map[uintptr]string{},
|
||||
customMarshalerMap: map[reflect.Type]func(interface{}) ([]byte, error){},
|
||||
line: 1,
|
||||
column: 1,
|
||||
offset: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// Close closes the encoder by writing any remaining data.
|
||||
// It does not write a stream terminating string "...".
|
||||
func (e *Encoder) Close() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Encode writes the YAML encoding of v to the stream.
|
||||
// If multiple items are encoded to the stream,
|
||||
// the second and subsequent document will be preceded with a "---" document separator,
|
||||
// but the first will not.
|
||||
//
|
||||
// See the documentation for Marshal for details about the conversion of Go values to YAML.
|
||||
func (e *Encoder) Encode(v interface{}) error {
|
||||
return e.EncodeContext(context.Background(), v)
|
||||
}
|
||||
|
||||
// EncodeContext writes the YAML encoding of v to the stream with context.Context.
|
||||
func (e *Encoder) EncodeContext(ctx context.Context, v interface{}) error {
|
||||
node, err := e.EncodeToNodeContext(ctx, v)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "failed to encode to node")
|
||||
}
|
||||
if err := e.setCommentByCommentMap(node); err != nil {
|
||||
return errors.Wrapf(err, "failed to set comment by comment map")
|
||||
}
|
||||
if !e.written {
|
||||
e.written = true
|
||||
} else {
|
||||
// write document separator
|
||||
e.writer.Write([]byte("---\n"))
|
||||
}
|
||||
var p printer.Printer
|
||||
e.writer.Write(p.PrintNode(node))
|
||||
return nil
|
||||
}
|
||||
|
||||
// EncodeToNode convert v to ast.Node.
|
||||
func (e *Encoder) EncodeToNode(v interface{}) (ast.Node, error) {
|
||||
return e.EncodeToNodeContext(context.Background(), v)
|
||||
}
|
||||
|
||||
// EncodeToNodeContext convert v to ast.Node with context.Context.
|
||||
func (e *Encoder) EncodeToNodeContext(ctx context.Context, v interface{}) (ast.Node, error) {
|
||||
for _, opt := range e.opts {
|
||||
if err := opt(e); err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to run option for encoder")
|
||||
}
|
||||
}
|
||||
node, err := e.encodeValue(ctx, reflect.ValueOf(v), 1)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to encode value")
|
||||
}
|
||||
return node, nil
|
||||
}
|
||||
|
||||
func (e *Encoder) setCommentByCommentMap(node ast.Node) error {
|
||||
if e.commentMap == nil {
|
||||
return nil
|
||||
}
|
||||
for path, comments := range e.commentMap {
|
||||
n, err := path.FilterNode(node)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "failed to filter node")
|
||||
}
|
||||
if n == nil {
|
||||
continue
|
||||
}
|
||||
for _, comment := range comments {
|
||||
commentTokens := []*token.Token{}
|
||||
for _, text := range comment.Texts {
|
||||
commentTokens = append(commentTokens, token.New(text, text, nil))
|
||||
}
|
||||
commentGroup := ast.CommentGroup(commentTokens)
|
||||
switch comment.Position {
|
||||
case CommentHeadPosition:
|
||||
if err := e.setHeadComment(node, n, commentGroup); err != nil {
|
||||
return errors.Wrapf(err, "failed to set head comment")
|
||||
}
|
||||
case CommentLinePosition:
|
||||
if err := e.setLineComment(node, n, commentGroup); err != nil {
|
||||
return errors.Wrapf(err, "failed to set line comment")
|
||||
}
|
||||
case CommentFootPosition:
|
||||
if err := e.setFootComment(node, n, commentGroup); err != nil {
|
||||
return errors.Wrapf(err, "failed to set foot comment")
|
||||
}
|
||||
default:
|
||||
return ErrUnknownCommentPositionType
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *Encoder) setHeadComment(node ast.Node, filtered ast.Node, comment *ast.CommentGroupNode) error {
|
||||
parent := ast.Parent(node, filtered)
|
||||
if parent == nil {
|
||||
return ErrUnsupportedHeadPositionType(node)
|
||||
}
|
||||
switch p := parent.(type) {
|
||||
case *ast.MappingValueNode:
|
||||
if err := p.SetComment(comment); err != nil {
|
||||
return errors.Wrapf(err, "failed to set comment")
|
||||
}
|
||||
case *ast.MappingNode:
|
||||
if err := p.SetComment(comment); err != nil {
|
||||
return errors.Wrapf(err, "failed to set comment")
|
||||
}
|
||||
case *ast.SequenceNode:
|
||||
if len(p.ValueHeadComments) == 0 {
|
||||
p.ValueHeadComments = make([]*ast.CommentGroupNode, len(p.Values))
|
||||
}
|
||||
var foundIdx int
|
||||
for idx, v := range p.Values {
|
||||
if v == filtered {
|
||||
foundIdx = idx
|
||||
break
|
||||
}
|
||||
}
|
||||
p.ValueHeadComments[foundIdx] = comment
|
||||
default:
|
||||
return ErrUnsupportedHeadPositionType(node)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *Encoder) setLineComment(node ast.Node, filtered ast.Node, comment *ast.CommentGroupNode) error {
|
||||
switch filtered.(type) {
|
||||
case *ast.MappingValueNode, *ast.SequenceNode:
|
||||
// Line comment cannot be set for mapping value node.
|
||||
// It should probably be set for the parent map node
|
||||
if err := e.setLineCommentToParentMapNode(node, filtered, comment); err != nil {
|
||||
return errors.Wrapf(err, "failed to set line comment to parent node")
|
||||
}
|
||||
default:
|
||||
if err := filtered.SetComment(comment); err != nil {
|
||||
return errors.Wrapf(err, "failed to set comment")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *Encoder) setLineCommentToParentMapNode(node ast.Node, filtered ast.Node, comment *ast.CommentGroupNode) error {
|
||||
parent := ast.Parent(node, filtered)
|
||||
if parent == nil {
|
||||
return ErrUnsupportedLinePositionType(node)
|
||||
}
|
||||
switch p := parent.(type) {
|
||||
case *ast.MappingValueNode:
|
||||
if err := p.Key.SetComment(comment); err != nil {
|
||||
return errors.Wrapf(err, "failed to set comment")
|
||||
}
|
||||
case *ast.MappingNode:
|
||||
if err := p.SetComment(comment); err != nil {
|
||||
return errors.Wrapf(err, "failed to set comment")
|
||||
}
|
||||
default:
|
||||
return ErrUnsupportedLinePositionType(parent)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *Encoder) setFootComment(node ast.Node, filtered ast.Node, comment *ast.CommentGroupNode) error {
|
||||
parent := ast.Parent(node, filtered)
|
||||
if parent == nil {
|
||||
return ErrUnsupportedFootPositionType(node)
|
||||
}
|
||||
switch n := parent.(type) {
|
||||
case *ast.MappingValueNode:
|
||||
n.FootComment = comment
|
||||
case *ast.MappingNode:
|
||||
n.FootComment = comment
|
||||
case *ast.SequenceNode:
|
||||
n.FootComment = comment
|
||||
default:
|
||||
return ErrUnsupportedFootPositionType(n)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeDocument(doc []byte) (ast.Node, error) {
|
||||
f, err := parser.ParseBytes(doc, 0)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse yaml")
|
||||
}
|
||||
for _, docNode := range f.Docs {
|
||||
if docNode.Body != nil {
|
||||
return docNode.Body, nil
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (e *Encoder) isInvalidValue(v reflect.Value) bool {
|
||||
if !v.IsValid() {
|
||||
return true
|
||||
}
|
||||
kind := v.Type().Kind()
|
||||
if kind == reflect.Ptr && v.IsNil() {
|
||||
return true
|
||||
}
|
||||
if kind == reflect.Interface && v.IsNil() {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type jsonMarshaler interface {
|
||||
MarshalJSON() ([]byte, error)
|
||||
}
|
||||
|
||||
func (e *Encoder) existsTypeInCustomMarshalerMap(t reflect.Type) bool {
|
||||
if _, exists := e.customMarshalerMap[t]; exists {
|
||||
return true
|
||||
}
|
||||
|
||||
globalCustomMarshalerMu.Lock()
|
||||
defer globalCustomMarshalerMu.Unlock()
|
||||
if _, exists := globalCustomMarshalerMap[t]; exists {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e *Encoder) marshalerFromCustomMarshalerMap(t reflect.Type) (func(interface{}) ([]byte, error), bool) {
|
||||
if marshaler, exists := e.customMarshalerMap[t]; exists {
|
||||
return marshaler, exists
|
||||
}
|
||||
|
||||
globalCustomMarshalerMu.Lock()
|
||||
defer globalCustomMarshalerMu.Unlock()
|
||||
if marshaler, exists := globalCustomMarshalerMap[t]; exists {
|
||||
return marshaler, exists
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
|
||||
func (e *Encoder) canEncodeByMarshaler(v reflect.Value) bool {
|
||||
if !v.CanInterface() {
|
||||
return false
|
||||
}
|
||||
if e.existsTypeInCustomMarshalerMap(v.Type()) {
|
||||
return true
|
||||
}
|
||||
iface := v.Interface()
|
||||
switch iface.(type) {
|
||||
case BytesMarshalerContext:
|
||||
return true
|
||||
case BytesMarshaler:
|
||||
return true
|
||||
case InterfaceMarshalerContext:
|
||||
return true
|
||||
case InterfaceMarshaler:
|
||||
return true
|
||||
case time.Time:
|
||||
return true
|
||||
case time.Duration:
|
||||
return true
|
||||
case encoding.TextMarshaler:
|
||||
return true
|
||||
case jsonMarshaler:
|
||||
return e.useJSONMarshaler
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeByMarshaler(ctx context.Context, v reflect.Value, column int) (ast.Node, error) {
|
||||
iface := v.Interface()
|
||||
|
||||
if marshaler, exists := e.marshalerFromCustomMarshalerMap(v.Type()); exists {
|
||||
doc, err := marshaler(iface)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to MarshalYAML")
|
||||
}
|
||||
node, err := e.encodeDocument(doc)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to encode document")
|
||||
}
|
||||
return node, nil
|
||||
}
|
||||
|
||||
if marshaler, ok := iface.(BytesMarshalerContext); ok {
|
||||
doc, err := marshaler.MarshalYAML(ctx)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to MarshalYAML")
|
||||
}
|
||||
node, err := e.encodeDocument(doc)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to encode document")
|
||||
}
|
||||
return node, nil
|
||||
}
|
||||
|
||||
if marshaler, ok := iface.(BytesMarshaler); ok {
|
||||
doc, err := marshaler.MarshalYAML()
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to MarshalYAML")
|
||||
}
|
||||
node, err := e.encodeDocument(doc)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to encode document")
|
||||
}
|
||||
return node, nil
|
||||
}
|
||||
|
||||
if marshaler, ok := iface.(InterfaceMarshalerContext); ok {
|
||||
marshalV, err := marshaler.MarshalYAML(ctx)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to MarshalYAML")
|
||||
}
|
||||
return e.encodeValue(ctx, reflect.ValueOf(marshalV), column)
|
||||
}
|
||||
|
||||
if marshaler, ok := iface.(InterfaceMarshaler); ok {
|
||||
marshalV, err := marshaler.MarshalYAML()
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to MarshalYAML")
|
||||
}
|
||||
return e.encodeValue(ctx, reflect.ValueOf(marshalV), column)
|
||||
}
|
||||
|
||||
if t, ok := iface.(time.Time); ok {
|
||||
return e.encodeTime(t, column), nil
|
||||
}
|
||||
|
||||
if t, ok := iface.(time.Duration); ok {
|
||||
return e.encodeDuration(t, column), nil
|
||||
}
|
||||
|
||||
if marshaler, ok := iface.(encoding.TextMarshaler); ok {
|
||||
doc, err := marshaler.MarshalText()
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to MarshalText")
|
||||
}
|
||||
node, err := e.encodeDocument(doc)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to encode document")
|
||||
}
|
||||
return node, nil
|
||||
}
|
||||
|
||||
if e.useJSONMarshaler {
|
||||
if marshaler, ok := iface.(jsonMarshaler); ok {
|
||||
jsonBytes, err := marshaler.MarshalJSON()
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to MarshalJSON")
|
||||
}
|
||||
doc, err := JSONToYAML(jsonBytes)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to convert json to yaml")
|
||||
}
|
||||
node, err := e.encodeDocument(doc)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to encode document")
|
||||
}
|
||||
return node, nil
|
||||
}
|
||||
}
|
||||
|
||||
return nil, xerrors.Errorf("does not implemented Marshaler")
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeValue(ctx context.Context, v reflect.Value, column int) (ast.Node, error) {
|
||||
if e.isInvalidValue(v) {
|
||||
return e.encodeNil(), nil
|
||||
}
|
||||
if e.canEncodeByMarshaler(v) {
|
||||
node, err := e.encodeByMarshaler(ctx, v, column)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to encode by marshaler")
|
||||
}
|
||||
return node, nil
|
||||
}
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return e.encodeInt(v.Int()), nil
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||
return e.encodeUint(v.Uint()), nil
|
||||
case reflect.Float32:
|
||||
return e.encodeFloat(v.Float(), 32), nil
|
||||
case reflect.Float64:
|
||||
return e.encodeFloat(v.Float(), 64), nil
|
||||
case reflect.Ptr:
|
||||
anchorName := e.anchorPtrToNameMap[v.Pointer()]
|
||||
if anchorName != "" {
|
||||
aliasName := anchorName
|
||||
alias := ast.Alias(token.New("*", "*", e.pos(column)))
|
||||
alias.Value = ast.String(token.New(aliasName, aliasName, e.pos(column)))
|
||||
return alias, nil
|
||||
}
|
||||
return e.encodeValue(ctx, v.Elem(), column)
|
||||
case reflect.Interface:
|
||||
return e.encodeValue(ctx, v.Elem(), column)
|
||||
case reflect.String:
|
||||
return e.encodeString(v.String(), column), nil
|
||||
case reflect.Bool:
|
||||
return e.encodeBool(v.Bool()), nil
|
||||
case reflect.Slice:
|
||||
if mapSlice, ok := v.Interface().(MapSlice); ok {
|
||||
return e.encodeMapSlice(ctx, mapSlice, column)
|
||||
}
|
||||
return e.encodeSlice(ctx, v)
|
||||
case reflect.Array:
|
||||
return e.encodeArray(ctx, v)
|
||||
case reflect.Struct:
|
||||
if v.CanInterface() {
|
||||
if mapItem, ok := v.Interface().(MapItem); ok {
|
||||
return e.encodeMapItem(ctx, mapItem, column)
|
||||
}
|
||||
if t, ok := v.Interface().(time.Time); ok {
|
||||
return e.encodeTime(t, column), nil
|
||||
}
|
||||
}
|
||||
return e.encodeStruct(ctx, v, column)
|
||||
case reflect.Map:
|
||||
return e.encodeMap(ctx, v, column), nil
|
||||
default:
|
||||
return nil, xerrors.Errorf("unknown value type %s", v.Type().String())
|
||||
}
|
||||
}
|
||||
|
||||
func (e *Encoder) pos(column int) *token.Position {
|
||||
return &token.Position{
|
||||
Line: e.line,
|
||||
Column: column,
|
||||
Offset: e.offset,
|
||||
IndentNum: e.indentNum,
|
||||
IndentLevel: e.indentLevel,
|
||||
}
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeNil() *ast.NullNode {
|
||||
value := "null"
|
||||
return ast.Null(token.New(value, value, e.pos(e.column)))
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeInt(v int64) *ast.IntegerNode {
|
||||
value := fmt.Sprint(v)
|
||||
return ast.Integer(token.New(value, value, e.pos(e.column)))
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeUint(v uint64) *ast.IntegerNode {
|
||||
value := fmt.Sprint(v)
|
||||
return ast.Integer(token.New(value, value, e.pos(e.column)))
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeFloat(v float64, bitSize int) ast.Node {
|
||||
if v == math.Inf(0) {
|
||||
value := ".inf"
|
||||
return ast.Infinity(token.New(value, value, e.pos(e.column)))
|
||||
} else if v == math.Inf(-1) {
|
||||
value := "-.inf"
|
||||
return ast.Infinity(token.New(value, value, e.pos(e.column)))
|
||||
} else if math.IsNaN(v) {
|
||||
value := ".nan"
|
||||
return ast.Nan(token.New(value, value, e.pos(e.column)))
|
||||
}
|
||||
value := strconv.FormatFloat(v, 'g', -1, bitSize)
|
||||
if !strings.Contains(value, ".") && !strings.Contains(value, "e") {
|
||||
// append x.0 suffix to keep float value context
|
||||
value = fmt.Sprintf("%s.0", value)
|
||||
}
|
||||
return ast.Float(token.New(value, value, e.pos(e.column)))
|
||||
}
|
||||
|
||||
func (e *Encoder) isNeedQuoted(v string) bool {
|
||||
if e.isJSONStyle {
|
||||
return true
|
||||
}
|
||||
if e.useLiteralStyleIfMultiline && strings.ContainsAny(v, "\n\r") {
|
||||
return false
|
||||
}
|
||||
if e.isFlowStyle && strings.ContainsAny(v, `]},'"`) {
|
||||
return true
|
||||
}
|
||||
if token.IsNeedQuoted(v) {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeString(v string, column int) *ast.StringNode {
|
||||
if e.isNeedQuoted(v) {
|
||||
if e.singleQuote {
|
||||
v = quoteWith(v, '\'')
|
||||
} else {
|
||||
v = strconv.Quote(v)
|
||||
}
|
||||
}
|
||||
return ast.String(token.New(v, v, e.pos(column)))
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeBool(v bool) *ast.BoolNode {
|
||||
value := fmt.Sprint(v)
|
||||
return ast.Bool(token.New(value, value, e.pos(e.column)))
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeSlice(ctx context.Context, value reflect.Value) (*ast.SequenceNode, error) {
|
||||
if e.indentSequence {
|
||||
e.column += e.indent
|
||||
}
|
||||
column := e.column
|
||||
sequence := ast.Sequence(token.New("-", "-", e.pos(column)), e.isFlowStyle)
|
||||
for i := 0; i < value.Len(); i++ {
|
||||
node, err := e.encodeValue(ctx, value.Index(i), column)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to encode value for slice")
|
||||
}
|
||||
sequence.Values = append(sequence.Values, node)
|
||||
}
|
||||
if e.indentSequence {
|
||||
e.column -= e.indent
|
||||
}
|
||||
return sequence, nil
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeArray(ctx context.Context, value reflect.Value) (*ast.SequenceNode, error) {
|
||||
if e.indentSequence {
|
||||
e.column += e.indent
|
||||
}
|
||||
column := e.column
|
||||
sequence := ast.Sequence(token.New("-", "-", e.pos(column)), e.isFlowStyle)
|
||||
for i := 0; i < value.Len(); i++ {
|
||||
node, err := e.encodeValue(ctx, value.Index(i), column)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to encode value for array")
|
||||
}
|
||||
sequence.Values = append(sequence.Values, node)
|
||||
}
|
||||
if e.indentSequence {
|
||||
e.column -= e.indent
|
||||
}
|
||||
return sequence, nil
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeMapItem(ctx context.Context, item MapItem, column int) (*ast.MappingValueNode, error) {
|
||||
k := reflect.ValueOf(item.Key)
|
||||
v := reflect.ValueOf(item.Value)
|
||||
value, err := e.encodeValue(ctx, v, column)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to encode MapItem")
|
||||
}
|
||||
if e.isMapNode(value) {
|
||||
value.AddColumn(e.indent)
|
||||
}
|
||||
return ast.MappingValue(
|
||||
token.New("", "", e.pos(column)),
|
||||
e.encodeString(k.Interface().(string), column),
|
||||
value,
|
||||
), nil
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeMapSlice(ctx context.Context, value MapSlice, column int) (*ast.MappingNode, error) {
|
||||
node := ast.Mapping(token.New("", "", e.pos(column)), e.isFlowStyle)
|
||||
for _, item := range value {
|
||||
value, err := e.encodeMapItem(ctx, item, column)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to encode MapItem for MapSlice")
|
||||
}
|
||||
node.Values = append(node.Values, value)
|
||||
}
|
||||
return node, nil
|
||||
}
|
||||
|
||||
func (e *Encoder) isMapNode(node ast.Node) bool {
|
||||
_, ok := node.(ast.MapNode)
|
||||
return ok
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeMap(ctx context.Context, value reflect.Value, column int) ast.Node {
|
||||
node := ast.Mapping(token.New("", "", e.pos(column)), e.isFlowStyle)
|
||||
keys := make([]interface{}, len(value.MapKeys()))
|
||||
for i, k := range value.MapKeys() {
|
||||
keys[i] = k.Interface()
|
||||
}
|
||||
sort.Slice(keys, func(i, j int) bool {
|
||||
return fmt.Sprint(keys[i]) < fmt.Sprint(keys[j])
|
||||
})
|
||||
for _, key := range keys {
|
||||
k := reflect.ValueOf(key)
|
||||
v := value.MapIndex(k)
|
||||
value, err := e.encodeValue(ctx, v, column)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
if e.isMapNode(value) {
|
||||
value.AddColumn(e.indent)
|
||||
}
|
||||
node.Values = append(node.Values, ast.MappingValue(
|
||||
nil,
|
||||
e.encodeString(fmt.Sprint(key), column),
|
||||
value,
|
||||
))
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
// IsZeroer is used to check whether an object is zero to determine
// whether it should be omitted when marshaling with the omitempty flag.
// One notable implementation is time.Time.
type IsZeroer interface {
	// IsZero reports whether the value is the zero value for its type.
	IsZero() bool
}
|
||||
|
||||
func (e *Encoder) isZeroValue(v reflect.Value) bool {
|
||||
kind := v.Kind()
|
||||
if z, ok := v.Interface().(IsZeroer); ok {
|
||||
if (kind == reflect.Ptr || kind == reflect.Interface) && v.IsNil() {
|
||||
return true
|
||||
}
|
||||
return z.IsZero()
|
||||
}
|
||||
switch kind {
|
||||
case reflect.String:
|
||||
return len(v.String()) == 0
|
||||
case reflect.Interface, reflect.Ptr:
|
||||
return v.IsNil()
|
||||
case reflect.Slice:
|
||||
return v.Len() == 0
|
||||
case reflect.Map:
|
||||
return v.Len() == 0
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return v.Int() == 0
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return v.Float() == 0
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return v.Uint() == 0
|
||||
case reflect.Bool:
|
||||
return !v.Bool()
|
||||
case reflect.Struct:
|
||||
vt := v.Type()
|
||||
for i := v.NumField() - 1; i >= 0; i-- {
|
||||
if vt.Field(i).PkgPath != "" {
|
||||
continue // private field
|
||||
}
|
||||
if !e.isZeroValue(v.Field(i)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeTime(v time.Time, column int) *ast.StringNode {
|
||||
value := v.Format(time.RFC3339Nano)
|
||||
if e.isJSONStyle {
|
||||
value = strconv.Quote(value)
|
||||
}
|
||||
return ast.String(token.New(value, value, e.pos(column)))
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeDuration(v time.Duration, column int) *ast.StringNode {
|
||||
value := v.String()
|
||||
if e.isJSONStyle {
|
||||
value = strconv.Quote(value)
|
||||
}
|
||||
return ast.String(token.New(value, value, e.pos(column)))
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeAnchor(anchorName string, value ast.Node, fieldValue reflect.Value, column int) (*ast.AnchorNode, error) {
|
||||
anchorNode := ast.Anchor(token.New("&", "&", e.pos(column)))
|
||||
anchorNode.Name = ast.String(token.New(anchorName, anchorName, e.pos(column)))
|
||||
anchorNode.Value = value
|
||||
if e.anchorCallback != nil {
|
||||
if err := e.anchorCallback(anchorNode, fieldValue.Interface()); err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to marshal anchor")
|
||||
}
|
||||
if snode, ok := anchorNode.Name.(*ast.StringNode); ok {
|
||||
anchorName = snode.Value
|
||||
}
|
||||
}
|
||||
if fieldValue.Kind() == reflect.Ptr {
|
||||
e.anchorPtrToNameMap[fieldValue.Pointer()] = anchorName
|
||||
}
|
||||
return anchorNode, nil
|
||||
}
|
||||
|
||||
// encodeStruct encodes a struct value as a YAML mapping node, honoring the
// per-field options collected by structFieldMap (omitempty, flow, anchor,
// alias, inline, auto anchor/alias). When an inline field requests automatic
// anchoring, the whole mapping is wrapped in an anchor node and that node is
// returned instead.
func (e *Encoder) encodeStruct(ctx context.Context, value reflect.Value, column int) (ast.Node, error) {
	node := ast.Mapping(token.New("", "", e.pos(column)), e.isFlowStyle)
	structType := value.Type()
	structFieldMap, err := structFieldMap(structType)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to get struct field map")
	}
	hasInlineAnchorField := false
	var inlineAnchorValue reflect.Value
	for i := 0; i < value.NumField(); i++ {
		field := structType.Field(i)
		if isIgnoredStructField(field) {
			continue
		}
		fieldValue := value.FieldByName(field.Name)
		structField := structFieldMap[field.Name]
		if structField.IsOmitEmpty && e.isZeroValue(fieldValue) {
			// omit encoding
			continue
		}
		// A flow-tagged field inside a block-style document is encoded with a
		// cloned encoder that has flow style switched on.
		ve := e
		if !e.isFlowStyle && structField.IsFlow {
			ve = &Encoder{}
			*ve = *e
			ve.isFlowStyle = true
		}
		value, err := ve.encodeValue(ctx, fieldValue, column)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to encode value")
		}
		if e.isMapNode(value) {
			value.AddColumn(e.indent)
		}
		var key ast.MapKeyNode = e.encodeString(structField.RenderName, column)
		switch {
		case structField.AnchorName != "":
			// explicit anchor name from the struct tag.
			anchorNode, err := e.encodeAnchor(structField.AnchorName, value, fieldValue, column)
			if err != nil {
				return nil, errors.Wrapf(err, "failed to encode anchor")
			}
			value = anchorNode
		case structField.IsAutoAlias:
			// emit an alias to an anchor previously recorded for the same
			// pointer value.
			if fieldValue.Kind() != reflect.Ptr {
				return nil, xerrors.Errorf(
					"%s in struct is not pointer type. but required automatically alias detection",
					structField.FieldName,
				)
			}
			anchorName := e.anchorPtrToNameMap[fieldValue.Pointer()]
			if anchorName == "" {
				return nil, xerrors.Errorf(
					"cannot find anchor name from pointer address for automatically alias detection",
				)
			}
			aliasName := anchorName
			alias := ast.Alias(token.New("*", "*", e.pos(column)))
			alias.Value = ast.String(token.New(aliasName, aliasName, e.pos(column)))
			value = alias
			if structField.IsInline {
				// if both used alias and inline, output `<<: *alias`
				key = ast.MergeKey(token.New("<<", "<<", e.pos(column)))
			}
		case structField.AliasName != "":
			// explicit alias name from the struct tag.
			aliasName := structField.AliasName
			alias := ast.Alias(token.New("*", "*", e.pos(column)))
			alias.Value = ast.String(token.New(aliasName, aliasName, e.pos(column)))
			value = alias
			if structField.IsInline {
				// if both used alias and inline, output `<<: *alias`
				key = ast.MergeKey(token.New("<<", "<<", e.pos(column)))
			}
		case structField.IsInline:
			// splice the inline map's entries directly into this mapping.
			isAutoAnchor := structField.IsAutoAnchor
			if !hasInlineAnchorField {
				hasInlineAnchorField = isAutoAnchor
			}
			if isAutoAnchor {
				inlineAnchorValue = fieldValue
			}
			mapNode, ok := value.(ast.MapNode)
			if !ok {
				return nil, xerrors.Errorf("inline value is must be map or struct type")
			}
			mapIter := mapNode.MapRange()
			for mapIter.Next() {
				key := mapIter.Key()
				value := mapIter.Value()
				keyName := key.GetToken().Value
				if structFieldMap.isIncludedRenderName(keyName) {
					// if declared same key name, skip encoding this field
					continue
				}
				// undo the extra indent added for the nested map node.
				key.AddColumn(-e.indent)
				value.AddColumn(-e.indent)
				node.Values = append(node.Values, ast.MappingValue(nil, key, value))
			}
			continue
		case structField.IsAutoAnchor:
			// auto anchor: the render name doubles as the anchor name.
			anchorNode, err := e.encodeAnchor(structField.RenderName, value, fieldValue, column)
			if err != nil {
				return nil, errors.Wrapf(err, "failed to encode anchor")
			}
			value = anchorNode
		}
		node.Values = append(node.Values, ast.MappingValue(nil, key, value))
	}
	if hasInlineAnchorField {
		// an inline field asked for auto anchoring: anchor the whole mapping.
		node.AddColumn(e.indent)
		anchorName := "anchor"
		anchorNode := ast.Anchor(token.New("&", "&", e.pos(column)))
		anchorNode.Name = ast.String(token.New(anchorName, anchorName, e.pos(column)))
		anchorNode.Value = node
		if e.anchorCallback != nil {
			if err := e.anchorCallback(anchorNode, value.Addr().Interface()); err != nil {
				return nil, errors.Wrapf(err, "failed to marshal anchor")
			}
			if snode, ok := anchorNode.Name.(*ast.StringNode); ok {
				anchorName = snode.Value
			}
		}
		if inlineAnchorValue.Kind() == reflect.Ptr {
			e.anchorPtrToNameMap[inlineAnchorValue.Pointer()] = anchorName
		}
		return anchorNode, nil
	}
	return node, nil
}
|
@ -0,0 +1,62 @@
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"github.com/goccy/go-yaml/ast"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
var (
	// ErrInvalidQuery is returned when an invalid query is given.
	ErrInvalidQuery = xerrors.New("invalid query")
	// ErrInvalidPath is returned when an invalid Path instance is given.
	ErrInvalidPath = xerrors.New("invalid path instance")
	// ErrInvalidPathString is returned when an invalid path string is given.
	ErrInvalidPathString = xerrors.New("invalid path string")
	// ErrNotFoundNode is returned when the requested node cannot be found.
	ErrNotFoundNode = xerrors.New("node not found")
	// ErrUnknownCommentPositionType is returned for an unknown CommentPosition value.
	ErrUnknownCommentPositionType = xerrors.New("unknown comment position type")
	// ErrInvalidCommentMapValue is returned when a nil CommentMap is given.
	ErrInvalidCommentMapValue = xerrors.New("invalid comment map value. it must be not nil value")
)
|
||||
|
||||
// ErrUnsupportedHeadPositionType builds an error for a node type that cannot
// take a head-position comment.
func ErrUnsupportedHeadPositionType(node ast.Node) error {
	return xerrors.Errorf("unsupported comment head position for %s", node.Type())
}

// ErrUnsupportedLinePositionType builds an error for a node type that cannot
// take a line-position comment.
func ErrUnsupportedLinePositionType(node ast.Node) error {
	return xerrors.Errorf("unsupported comment line position for %s", node.Type())
}

// ErrUnsupportedFootPositionType builds an error for a node type that cannot
// take a foot-position comment.
func ErrUnsupportedFootPositionType(node ast.Node) error {
	return xerrors.Errorf("unsupported comment foot position for %s", node.Type())
}
|
||||
|
||||
// IsInvalidQueryError reports whether err is (or wraps) ErrInvalidQuery.
func IsInvalidQueryError(err error) bool {
	return xerrors.Is(err, ErrInvalidQuery)
}

// IsInvalidPathError reports whether err is (or wraps) ErrInvalidPath.
func IsInvalidPathError(err error) bool {
	return xerrors.Is(err, ErrInvalidPath)
}

// IsInvalidPathStringError reports whether err is (or wraps) ErrInvalidPathString.
func IsInvalidPathStringError(err error) bool {
	return xerrors.Is(err, ErrInvalidPathString)
}

// IsNotFoundNodeError reports whether err is (or wraps) ErrNotFoundNode.
func IsNotFoundNodeError(err error) bool {
	return xerrors.Is(err, ErrNotFoundNode)
}

// IsInvalidTokenTypeError reports whether err is (or wraps) ast.ErrInvalidTokenType.
func IsInvalidTokenTypeError(err error) bool {
	return xerrors.Is(err, ast.ErrInvalidTokenType)
}

// IsInvalidAnchorNameError reports whether err is (or wraps) ast.ErrInvalidAnchorName.
func IsInvalidAnchorNameError(err error) bool {
	return xerrors.Is(err, ast.ErrInvalidAnchorName)
}

// IsInvalidAliasNameError reports whether err is (or wraps) ast.ErrInvalidAliasName.
func IsInvalidAliasNameError(err error) bool {
	return xerrors.Is(err, ast.ErrInvalidAliasName)
}
|
@ -0,0 +1,260 @@
|
||||
package errors
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"reflect"
|
||||
|
||||
"github.com/goccy/go-yaml/printer"
|
||||
"github.com/goccy/go-yaml/token"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
const (
	// defaultColorize controls whether error output is colorized by default.
	defaultColorize = false
	// defaultIncludeSource controls whether the offending YAML source is
	// included in error output by default.
	defaultIncludeSource = true
)

var (
	// ErrDecodeRequiredPointerType is returned when the decode target is not
	// a pointer value.
	ErrDecodeRequiredPointerType = xerrors.New("required pointer type value")
)
|
||||
|
||||
// Wrapf wraps err with a formatted message and captures the caller's frame
// so the wrap site appears in stack traces.
func Wrapf(err error, msg string, args ...interface{}) error {
	return &wrapError{
		baseError: &baseError{},
		err:       xerrors.Errorf(msg, args...),
		nextErr:   err,
		frame:     xerrors.Caller(1),
	}
}

// ErrSyntax creates a syntax error instance carrying the message and the
// offending token, plus the caller's frame for stack traces.
func ErrSyntax(msg string, tk *token.Token) *syntaxError {
	return &syntaxError{
		baseError: &baseError{},
		msg:       msg,
		token:     tk,
		frame:     xerrors.Caller(1),
	}
}
|
||||
|
||||
type baseError struct {
|
||||
state fmt.State
|
||||
verb rune
|
||||
}
|
||||
|
||||
func (e *baseError) Error() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (e *baseError) chainStateAndVerb(err error) {
|
||||
wrapErr, ok := err.(*wrapError)
|
||||
if ok {
|
||||
wrapErr.state = e.state
|
||||
wrapErr.verb = e.verb
|
||||
}
|
||||
syntaxErr, ok := err.(*syntaxError)
|
||||
if ok {
|
||||
syntaxErr.state = e.state
|
||||
syntaxErr.verb = e.verb
|
||||
}
|
||||
}
|
||||
|
||||
// wrapError decorates another error with an additional message and a stack
// frame, forming a chain through nextErr.
type wrapError struct {
	*baseError
	err     error         // the message added at this layer
	nextErr error         // the wrapped (next) error in the chain
	frame   xerrors.Frame // caller frame captured at wrap time
}

// FormatErrorPrinter extends xerrors.Printer with the output options used by
// this package's pretty-printing.
type FormatErrorPrinter struct {
	xerrors.Printer
	Colored    bool // colorize the output
	InclSource bool // include the offending YAML source in the output
}
|
||||
|
||||
func (e *wrapError) As(target interface{}) bool {
|
||||
err := e.nextErr
|
||||
for {
|
||||
if wrapErr, ok := err.(*wrapError); ok {
|
||||
err = wrapErr.nextErr
|
||||
continue
|
||||
}
|
||||
break
|
||||
}
|
||||
return xerrors.As(err, target)
|
||||
}
|
||||
|
||||
func (e *wrapError) Unwrap() error {
|
||||
return e.nextErr
|
||||
}
|
||||
|
||||
func (e *wrapError) PrettyPrint(p xerrors.Printer, colored, inclSource bool) error {
|
||||
return e.FormatError(&FormatErrorPrinter{Printer: p, Colored: colored, InclSource: inclSource})
|
||||
}
|
||||
|
||||
// FormatError implements xerrors.Formatter. For %+v it prints this layer's
// message with its stack frame and hands the next error back to xerrors;
// otherwise it skips intermediate wrap layers and prints only the innermost
// error.
func (e *wrapError) FormatError(p xerrors.Printer) error {
	if _, ok := p.(*FormatErrorPrinter); !ok {
		// wrap plain printers so downstream code can read the options.
		p = &FormatErrorPrinter{
			Printer:    p,
			Colored:    defaultColorize,
			InclSource: defaultIncludeSource,
		}
	}
	if e.verb == 'v' && e.state.Flag('+') {
		// print stack trace for debugging
		p.Print(e.err, "\n")
		e.frame.Format(p)
		e.chainStateAndVerb(e.nextErr)
		return e.nextErr
	}
	// walk past any intermediate wrapError layers to the innermost error.
	err := e.nextErr
	for {
		if wrapErr, ok := err.(*wrapError); ok {
			err = wrapErr.nextErr
			continue
		}
		break
	}
	e.chainStateAndVerb(err)
	if fmtErr, ok := err.(xerrors.Formatter); ok {
		fmtErr.FormatError(p)
	} else {
		p.Print(err)
	}
	return nil
}
|
||||
|
||||
// wrapState adapts an fmt.State so that xerrors.FormatError always prints
// detail, regardless of the flags on the original state.
type wrapState struct {
	org fmt.State // the original formatting state being wrapped
}

// Write delegates to the wrapped state.
func (s *wrapState) Write(b []byte) (n int, err error) {
	return s.org.Write(b)
}

// Width delegates to the wrapped state.
func (s *wrapState) Width() (wid int, ok bool) {
	return s.org.Width()
}

// Precision delegates to the wrapped state.
func (s *wrapState) Precision() (prec int, ok bool) {
	return s.org.Precision()
}

// Flag reports every flag as set except '#'.
func (s *wrapState) Flag(c int) bool {
	// set true to 'printDetail' forced because when p.Detail() is false, xerrors.Printer no output any text
	if c == '#' {
		// ignore '#' keyword because xerrors.FormatError doesn't set true to printDetail.
		// ( see https://github.com/golang/xerrors/blob/master/adaptor.go#L39-L43 )
		return false
	}
	return true
}
|
||||
|
||||
// Format implements fmt.Formatter: it records the state/verb so nested
// formatting can honor them, then delegates to xerrors.FormatError through
// a wrapState that forces detail output.
func (e *wrapError) Format(state fmt.State, verb rune) {
	e.state = state
	e.verb = verb
	xerrors.FormatError(e, &wrapState{org: state}, verb)
}

// Error renders the full error chain using the package defaults for
// colorizing and source inclusion.
func (e *wrapError) Error() string {
	var buf bytes.Buffer
	e.PrettyPrint(&Sink{&buf}, defaultColorize, defaultIncludeSource)
	return buf.String()
}
|
||||
|
||||
// syntaxError reports a YAML syntax problem at a specific token, with a
// captured stack frame for %+v output.
type syntaxError struct {
	*baseError
	msg   string        // human-readable description of the problem
	token *token.Token  // token at which the problem was detected
	frame xerrors.Frame // caller frame captured at creation time
}

// PrettyPrint renders the error to p with the given color/source options.
func (e *syntaxError) PrettyPrint(p xerrors.Printer, colored, inclSource bool) error {
	return e.FormatError(&FormatErrorPrinter{Printer: p, Colored: colored, InclSource: inclSource})
}

// FormatError implements xerrors.Formatter: it prints "[line:column] msg",
// optionally followed by the offending source and (for %+v) a stack trace.
func (e *syntaxError) FormatError(p xerrors.Printer) error {
	var pp printer.Printer

	var colored, inclSource bool
	if fep, ok := p.(*FormatErrorPrinter); ok {
		colored = fep.Colored
		inclSource = fep.InclSource
	}

	pos := fmt.Sprintf("[%d:%d] ", e.token.Position.Line, e.token.Position.Column)
	msg := pp.PrintErrorMessage(fmt.Sprintf("%s%s", pos, e.msg), colored)
	if inclSource {
		msg += "\n" + pp.PrintErrorToken(e.token, colored)
	}
	p.Print(msg)

	if e.verb == 'v' && e.state.Flag('+') {
		// %+v
		// print stack trace for debugging
		e.frame.Format(p)
	}
	return nil
}
|
||||
|
||||
// PrettyPrinter is implemented by errors that can render themselves with
// optional colorizing and source inclusion.
type PrettyPrinter interface {
	PrettyPrint(xerrors.Printer, bool, bool) error
}

// Sink is an xerrors.Printer that accumulates output into a bytes.Buffer.
type Sink struct{ *bytes.Buffer }

// Print writes args to the underlying buffer.
func (es *Sink) Print(args ...interface{}) {
	fmt.Fprint(es.Buffer, args...)
}

// Printf writes the formatted output to the underlying buffer.
func (es *Sink) Printf(f string, args ...interface{}) {
	fmt.Fprintf(es.Buffer, f, args...)
}

// Detail reports false so xerrors omits detail/frame output when printing.
func (es *Sink) Detail() bool {
	return false
}

// Error renders the syntax error using the package defaults for colorizing
// and source inclusion.
func (e *syntaxError) Error() string {
	var buf bytes.Buffer
	e.PrettyPrint(&Sink{&buf}, defaultColorize, defaultIncludeSource)
	return buf.String()
}
|
||||
|
||||
// TypeError is returned when a YAML value cannot be unmarshaled into the
// destination Go type.
type TypeError struct {
	DstType         reflect.Type // destination Go type
	SrcType         reflect.Type // source type derived from the YAML value
	StructFieldName *string      // set when the failure occurred on a struct field
	Token           *token.Token // token at which the failure occurred
}

// Error implements the error interface, mentioning the struct field name
// when one is available.
func (e *TypeError) Error() string {
	if e.StructFieldName != nil {
		return fmt.Sprintf("cannot unmarshal %s into Go struct field %s of type %s", e.SrcType, *e.StructFieldName, e.DstType)
	}
	return fmt.Sprintf("cannot unmarshal %s into Go value of type %s", e.SrcType, e.DstType)
}
|
||||
|
||||
// PrettyPrint renders the error to p with the given color/source options.
func (e *TypeError) PrettyPrint(p xerrors.Printer, colored, inclSource bool) error {
	return e.FormatError(&FormatErrorPrinter{Printer: p, Colored: colored, InclSource: inclSource})
}

// FormatError implements xerrors.Formatter: it prints "[line:column] msg",
// optionally followed by the offending source.
func (e *TypeError) FormatError(p xerrors.Printer) error {
	var pp printer.Printer

	var colored, inclSource bool
	if fep, ok := p.(*FormatErrorPrinter); ok {
		colored = fep.Colored
		inclSource = fep.InclSource
	}

	pos := fmt.Sprintf("[%d:%d] ", e.Token.Position.Line, e.Token.Position.Column)
	msg := pp.PrintErrorMessage(fmt.Sprintf("%s%s", pos, e.Error()), colored)
	if inclSource {
		msg += "\n" + pp.PrintErrorToken(e.Token, colored)
	}
	p.Print(msg)

	return nil
}
|
@ -0,0 +1,23 @@
|
||||
package lexer
|
||||
|
||||
import (
|
||||
"io"
|
||||
|
||||
"github.com/goccy/go-yaml/scanner"
|
||||
"github.com/goccy/go-yaml/token"
|
||||
)
|
||||
|
||||
// Tokenize split to token instances from string
|
||||
func Tokenize(src string) token.Tokens {
|
||||
var s scanner.Scanner
|
||||
s.Init(src)
|
||||
var tokens token.Tokens
|
||||
for {
|
||||
subTokens, err := s.Scan()
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
tokens.Add(subTokens...)
|
||||
}
|
||||
return tokens
|
||||
}
|
@ -0,0 +1,278 @@
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"io"
|
||||
"reflect"
|
||||
|
||||
"github.com/goccy/go-yaml/ast"
|
||||
)
|
||||
|
||||
// DecodeOption is a functional option type for Decoder.
type DecodeOption func(d *Decoder) error

// ReferenceReaders lets the Decoder reference anchors defined in the passed readers.
func ReferenceReaders(readers ...io.Reader) DecodeOption {
	return func(d *Decoder) error {
		d.referenceReaders = append(d.referenceReaders, readers...)
		return nil
	}
}

// ReferenceFiles lets the Decoder reference anchors defined in the passed files.
func ReferenceFiles(files ...string) DecodeOption {
	return func(d *Decoder) error {
		d.referenceFiles = files
		return nil
	}
}

// ReferenceDirs lets the Decoder reference anchors defined in files under the passed dirs.
func ReferenceDirs(dirs ...string) DecodeOption {
	return func(d *Decoder) error {
		d.referenceDirs = dirs
		return nil
	}
}

// RecursiveDir searches YAML files recursively from the dirs passed via the
// ReferenceDirs option.
func RecursiveDir(isRecursive bool) DecodeOption {
	return func(d *Decoder) error {
		d.isRecursiveDir = isRecursive
		return nil
	}
}

// Validator sets a StructValidator instance on the Decoder.
func Validator(v StructValidator) DecodeOption {
	return func(d *Decoder) error {
		d.validator = v
		return nil
	}
}

// Strict enables both DisallowUnknownField and DisallowDuplicateKey.
func Strict() DecodeOption {
	return func(d *Decoder) error {
		d.disallowUnknownField = true
		d.disallowDuplicateKey = true
		return nil
	}
}

// DisallowUnknownField causes the Decoder to return an error when the destination
// is a struct and the input contains object keys which do not match any
// non-ignored, exported fields in the destination.
func DisallowUnknownField() DecodeOption {
	return func(d *Decoder) error {
		d.disallowUnknownField = true
		return nil
	}
}

// DisallowDuplicateKey causes an error when mapping keys are duplicated.
func DisallowDuplicateKey() DecodeOption {
	return func(d *Decoder) error {
		d.disallowDuplicateKey = true
		return nil
	}
}

// UseOrderedMap decodes values that can be interpreted as a map into
// MapSlice (an ordered map) when no destination type is specified.
func UseOrderedMap() DecodeOption {
	return func(d *Decoder) error {
		d.useOrderedMap = true
		return nil
	}
}

// UseJSONUnmarshaler if neither `BytesUnmarshaler` nor `InterfaceUnmarshaler` is implemented
// and `UnmarshalJSON([]byte) error` is implemented, convert the argument from `YAML` to `JSON` and then call it.
func UseJSONUnmarshaler() DecodeOption {
	return func(d *Decoder) error {
		d.useJSONUnmarshaler = true
		return nil
	}
}

// CustomUnmarshaler overrides any decoding process for the type specified in generics.
//
// NOTE: If RegisterCustomUnmarshaler and CustomUnmarshaler of DecodeOption are specified for the same type,
// the CustomUnmarshaler specified in DecodeOption takes precedence.
func CustomUnmarshaler[T any](unmarshaler func(*T, []byte) error) DecodeOption {
	return func(d *Decoder) error {
		// key the map by the *T type so lookups during decode can match it.
		var typ *T
		d.customUnmarshalerMap[reflect.TypeOf(typ)] = func(v interface{}, b []byte) error {
			return unmarshaler(v.(*T), b)
		}
		return nil
	}
}
|
||||
|
||||
// EncodeOption is a functional option type for Encoder.
type EncodeOption func(e *Encoder) error

// Indent sets the number of spaces used for one indentation level.
func Indent(spaces int) EncodeOption {
	return func(e *Encoder) error {
		e.indent = spaces
		return nil
	}
}

// IndentSequence causes sequence values to be indented the same value as Indent.
func IndentSequence(indent bool) EncodeOption {
	return func(e *Encoder) error {
		e.indentSequence = indent
		return nil
	}
}

// UseSingleQuote determines if single or double quotes should be preferred for strings.
func UseSingleQuote(sq bool) EncodeOption {
	return func(e *Encoder) error {
		e.singleQuote = sq
		return nil
	}
}

// Flow enables encoding in flow style.
func Flow(isFlowStyle bool) EncodeOption {
	return func(e *Encoder) error {
		e.isFlowStyle = isFlowStyle
		return nil
	}
}

// UseLiteralStyleIfMultiline causes encoding multiline strings with a literal syntax,
// no matter what characters they include.
func UseLiteralStyleIfMultiline(useLiteralStyleIfMultiline bool) EncodeOption {
	return func(e *Encoder) error {
		e.useLiteralStyleIfMultiline = useLiteralStyleIfMultiline
		return nil
	}
}

// JSON encodes in JSON format (flow style is implied).
func JSON() EncodeOption {
	return func(e *Encoder) error {
		e.isJSONStyle = true
		e.isFlowStyle = true
		return nil
	}
}

// MarshalAnchor registers a callback invoked when the encoder finds an anchor
// during encoding.
func MarshalAnchor(callback func(*ast.AnchorNode, interface{}) error) EncodeOption {
	return func(e *Encoder) error {
		e.anchorCallback = callback
		return nil
	}
}

// UseJSONMarshaler if neither `BytesMarshaler` nor `InterfaceMarshaler`
// nor `encoding.TextMarshaler` is implemented and `MarshalJSON()([]byte, error)` is implemented,
// call `MarshalJSON` to convert the returned `JSON` to `YAML` for processing.
func UseJSONMarshaler() EncodeOption {
	return func(e *Encoder) error {
		e.useJSONMarshaler = true
		return nil
	}
}

// CustomMarshaler overrides any encoding process for the type specified in generics.
//
// NOTE: If type T implements MarshalYAML for pointer receiver, the type specified in CustomMarshaler must be *T.
// If RegisterCustomMarshaler and CustomMarshaler of EncodeOption are specified for the same type,
// the CustomMarshaler specified in EncodeOption takes precedence.
func CustomMarshaler[T any](marshaler func(T) ([]byte, error)) EncodeOption {
	return func(e *Encoder) error {
		// key the map by the T type so lookups during encode can match it.
		var typ T
		e.customMarshalerMap[reflect.TypeOf(typ)] = func(v interface{}) ([]byte, error) {
			return marshaler(v.(T))
		}
		return nil
	}
}
|
||||
|
||||
// CommentPosition indicates where a comment is attached relative to its node.
type CommentPosition int

const (
	CommentHeadPosition CommentPosition = CommentPosition(iota)
	CommentLinePosition
	CommentFootPosition
)

// String returns the human-readable name of the position, or "" for an
// unknown value.
func (p CommentPosition) String() string {
	if p < CommentHeadPosition || p > CommentFootPosition {
		return ""
	}
	return [...]string{"Head", "Line", "Foot"}[p]
}

// LineComment create a one-line comment for CommentMap.
func LineComment(text string) *Comment {
	comment := Comment{
		Texts:    []string{text},
		Position: CommentLinePosition,
	}
	return &comment
}

// HeadComment create a multiline comment for CommentMap.
func HeadComment(texts ...string) *Comment {
	comment := Comment{
		Texts:    texts,
		Position: CommentHeadPosition,
	}
	return &comment
}

// FootComment create a multiline comment for CommentMap.
func FootComment(texts ...string) *Comment {
	comment := Comment{
		Texts:    texts,
		Position: CommentFootPosition,
	}
	return &comment
}

// Comment raw data for comment.
type Comment struct {
	Texts    []string        // comment lines, one entry per line
	Position CommentPosition // where the comment attaches
}

// CommentMap map of the position of the comment and the comment information.
type CommentMap map[string][]*Comment
|
||||
|
||||
// WithComment add a comment using the location and text information given in the CommentMap.
|
||||
func WithComment(cm CommentMap) EncodeOption {
|
||||
return func(e *Encoder) error {
|
||||
commentMap := map[*Path][]*Comment{}
|
||||
for k, v := range cm {
|
||||
path, err := PathString(k)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
commentMap[path] = v
|
||||
}
|
||||
e.commentMap = commentMap
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// CommentToMap apply the position and content of comments in a YAML document to a CommentMap.
|
||||
func CommentToMap(cm CommentMap) DecodeOption {
|
||||
return func(d *Decoder) error {
|
||||
if cm == nil {
|
||||
return ErrInvalidCommentMapValue
|
||||
}
|
||||
d.toCommentMap = cm
|
||||
return nil
|
||||
}
|
||||
}
|
@ -0,0 +1,199 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/goccy/go-yaml/token"
|
||||
)
|
||||
|
||||
// context holds the parser's position within the token stream along with the
// current YAML path and parse mode.
type context struct {
	parent *context     // enclosing context this one was copied from
	idx    int          // index of the current token
	size   int          // number of tokens in the stream
	tokens token.Tokens // token stream being parsed
	mode   Mode         // parse mode flags (e.g. ParseComments)
	path   string       // YAML path of the current position (e.g. "$.a[0]")
}
|
||||
|
||||
// pathSpecialChars are the characters with special meaning in a YAML path
// expression; a path containing any of them must be quoted.
var pathSpecialChars = []string{
	"$", "*", ".", "[", "]",
}

// containsPathSpecialChar reports whether path contains any YAML-path
// special character.
func containsPathSpecialChar(path string) bool {
	return strings.ContainsAny(path, strings.Join(pathSpecialChars, ""))
}

// normalizePath wraps path in single quotes when it contains special
// characters, so it can be embedded safely in a YAML path expression.
func normalizePath(path string) string {
	if !containsPathSpecialChar(path) {
		return path
	}
	return "'" + path + "'"
}
|
||||
|
||||
func (c *context) withChild(path string) *context {
|
||||
ctx := c.copy()
|
||||
path = normalizePath(path)
|
||||
ctx.path += fmt.Sprintf(".%s", path)
|
||||
return ctx
|
||||
}
|
||||
|
||||
func (c *context) withIndex(idx uint) *context {
|
||||
ctx := c.copy()
|
||||
ctx.path += fmt.Sprintf("[%d]", idx)
|
||||
return ctx
|
||||
}
|
||||
|
||||
func (c *context) copy() *context {
|
||||
return &context{
|
||||
parent: c,
|
||||
idx: c.idx,
|
||||
size: c.size,
|
||||
tokens: append(token.Tokens{}, c.tokens...),
|
||||
mode: c.mode,
|
||||
path: c.path,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *context) next() bool {
|
||||
return c.idx < c.size
|
||||
}
|
||||
|
||||
func (c *context) previousToken() *token.Token {
|
||||
if c.idx > 0 {
|
||||
return c.tokens[c.idx-1]
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// insertToken inserts tk at position idx, fixing up the prev/next links of
// the neighboring token. The insertion is mirrored into the parent context so
// both views of the stream stay consistent.
func (c *context) insertToken(idx int, tk *token.Token) {
	if c.parent != nil {
		c.parent.insertToken(idx, tk)
	}
	if c.size < idx {
		// out-of-range index: nothing to insert.
		return
	}
	if c.size == idx {
		// appending at the end.
		// NOTE(review): tk is appended after the last token but linked as its
		// Prev (tk.Next = last) — confirm this linking is intended.
		curToken := c.tokens[c.size-1]
		tk.Next = curToken
		curToken.Prev = tk

		c.tokens = append(c.tokens, tk)
		c.size = len(c.tokens)
		return
	}

	// inserting in the middle: link tk before the token currently at idx,
	// then shift the tail right by one slot.
	curToken := c.tokens[idx]
	tk.Next = curToken
	curToken.Prev = tk

	c.tokens = append(c.tokens[:idx+1], c.tokens[idx:]...)
	c.tokens[idx] = tk
	c.size = len(c.tokens)
}
|
||||
|
||||
func (c *context) currentToken() *token.Token {
|
||||
if c.idx >= c.size {
|
||||
return nil
|
||||
}
|
||||
return c.tokens[c.idx]
|
||||
}
|
||||
|
||||
func (c *context) nextToken() *token.Token {
|
||||
if c.idx+1 >= c.size {
|
||||
return nil
|
||||
}
|
||||
return c.tokens[c.idx+1]
|
||||
}
|
||||
|
||||
func (c *context) afterNextToken() *token.Token {
|
||||
if c.idx+2 >= c.size {
|
||||
return nil
|
||||
}
|
||||
return c.tokens[c.idx+2]
|
||||
}
|
||||
|
||||
func (c *context) nextNotCommentToken() *token.Token {
|
||||
for i := c.idx + 1; i < c.size; i++ {
|
||||
tk := c.tokens[i]
|
||||
if tk.Type == token.CommentType {
|
||||
continue
|
||||
}
|
||||
return tk
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *context) afterNextNotCommentToken() *token.Token {
|
||||
notCommentTokenCount := 0
|
||||
for i := c.idx + 1; i < c.size; i++ {
|
||||
tk := c.tokens[i]
|
||||
if tk.Type == token.CommentType {
|
||||
continue
|
||||
}
|
||||
notCommentTokenCount++
|
||||
if notCommentTokenCount == 2 {
|
||||
return tk
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *context) enabledComment() bool {
|
||||
return c.mode&ParseComments != 0
|
||||
}
|
||||
|
||||
func (c *context) isCurrentCommentToken() bool {
|
||||
tk := c.currentToken()
|
||||
if tk == nil {
|
||||
return false
|
||||
}
|
||||
return tk.Type == token.CommentType
|
||||
}
|
||||
|
||||
func (c *context) progressIgnoreComment(num int) {
|
||||
if c.parent != nil {
|
||||
c.parent.progressIgnoreComment(num)
|
||||
}
|
||||
if c.size <= c.idx+num {
|
||||
c.idx = c.size
|
||||
} else {
|
||||
c.idx += num
|
||||
}
|
||||
}
|
||||
|
||||
func (c *context) progress(num int) {
|
||||
if c.isCurrentCommentToken() {
|
||||
return
|
||||
}
|
||||
c.progressIgnoreComment(num)
|
||||
}
|
||||
|
||||
func newContext(tokens token.Tokens, mode Mode) *context {
|
||||
filteredTokens := []*token.Token{}
|
||||
if mode&ParseComments != 0 {
|
||||
filteredTokens = tokens
|
||||
} else {
|
||||
for _, tk := range tokens {
|
||||
if tk.Type == token.CommentType {
|
||||
continue
|
||||
}
|
||||
// keep prev/next reference between tokens containing comments
|
||||
// https://github.com/goccy/go-yaml/issues/254
|
||||
filteredTokens = append(filteredTokens, tk)
|
||||
}
|
||||
}
|
||||
return &context{
|
||||
idx: 0,
|
||||
size: len(filteredTokens),
|
||||
tokens: token.Tokens(filteredTokens),
|
||||
mode: mode,
|
||||
path: "$",
|
||||
}
|
||||
}
|
@ -0,0 +1,714 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"strings"
|
||||
|
||||
"github.com/goccy/go-yaml/ast"
|
||||
"github.com/goccy/go-yaml/internal/errors"
|
||||
"github.com/goccy/go-yaml/lexer"
|
||||
"github.com/goccy/go-yaml/token"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
type parser struct{}
|
||||
|
||||
// parseMapping parses a flow-style mapping ( "{ a: b, c: d }" ) starting at
// the current MappingStart token and returns the resulting MappingNode.
// An error is returned when the closing '}' is never found.
func (p *parser) parseMapping(ctx *context) (*ast.MappingNode, error) {
	mapTk := ctx.currentToken()
	node := ast.Mapping(mapTk, true)
	node.SetPath(ctx.path)
	ctx.progress(1) // skip MappingStart token
	for ctx.next() {
		tk := ctx.currentToken()
		if tk.Type == token.MappingEndType {
			// closing '}' terminates the mapping normally
			node.End = tk
			return node, nil
		} else if tk.Type == token.CollectEntryType {
			// ',' separator between entries — skip it
			ctx.progress(1)
			continue
		}

		value, err := p.parseMappingValue(ctx)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse mapping value in mapping node")
		}
		mvnode, ok := value.(*ast.MappingValueNode)
		if !ok {
			return nil, errors.ErrSyntax("failed to parse flow mapping node", value.GetToken())
		}
		node.Values = append(node.Values, mvnode)
		ctx.progress(1)
	}
	// ran out of tokens before MappingEnd
	return nil, errors.ErrSyntax("unterminated flow mapping", node.GetToken())
}
|
||||
|
||||
// parseSequence parses a flow-style sequence ( "[ a, b ]" ) starting at the
// current SequenceStart token and returns the resulting SequenceNode.
func (p *parser) parseSequence(ctx *context) (*ast.SequenceNode, error) {
	node := ast.Sequence(ctx.currentToken(), true)
	node.SetPath(ctx.path)
	ctx.progress(1) // skip SequenceStart token
	for ctx.next() {
		tk := ctx.currentToken()
		if tk.Type == token.SequenceEndType {
			// closing ']' terminates the sequence
			node.End = tk
			break
		} else if tk.Type == token.CollectEntryType {
			// ',' separator between elements — skip it
			ctx.progress(1)
			continue
		}

		// each element is parsed under path "$...[<index>]"
		value, err := p.parseToken(ctx.withIndex(uint(len(node.Values))), tk)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse sequence value in flow sequence node")
		}
		node.Values = append(node.Values, value)
		ctx.progress(1)
	}
	return node, nil
}
|
||||
|
||||
// parseTag parses a tag token ( "!!str", "!!map", custom "!foo", ... ) and
// the value it applies to, returning a TagNode wrapping that value.
// !!seq and !!set are currently rejected as unsupported.
func (p *parser) parseTag(ctx *context) (*ast.TagNode, error) {
	tagToken := ctx.currentToken()
	node := ast.Tag(tagToken)
	node.SetPath(ctx.path)
	ctx.progress(1) // skip tag token
	var (
		value ast.Node
		err   error
	)
	switch token.ReservedTagKeyword(tagToken.Value) {
	case token.MappingTag,
		token.OrderedMapTag:
		value, err = p.parseMapping(ctx)
	case token.IntegerTag,
		token.FloatTag,
		token.StringTag,
		token.BinaryTag,
		token.TimestampTag,
		token.NullTag:
		typ := ctx.currentToken().Type
		if typ == token.LiteralType || typ == token.FoldedType {
			// scalar tags may still be followed by a block scalar ( | or > )
			value, err = p.parseLiteral(ctx)
		} else {
			value = p.parseScalarValue(ctx.currentToken())
		}
	case token.SequenceTag,
		token.SetTag:
		err = errors.ErrSyntax(fmt.Sprintf("sorry, currently not supported %s tag", tagToken.Value), tagToken)
	default:
		// custom tag
		value, err = p.parseToken(ctx, ctx.currentToken())
	}
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse tag value")
	}
	node.Value = value
	return node, nil
}
|
||||
|
||||
func (p *parser) removeLeftSideNewLineCharacter(src string) string {
|
||||
// CR or LF or CRLF
|
||||
return strings.TrimLeft(strings.TrimLeft(strings.TrimLeft(src, "\r"), "\n"), "\r\n")
|
||||
}
|
||||
|
||||
func (p *parser) existsNewLineCharacter(src string) bool {
|
||||
if strings.Index(src, "\n") > 0 {
|
||||
return true
|
||||
}
|
||||
if strings.Index(src, "\r") > 0 {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (p *parser) validateMapKey(tk *token.Token) error {
|
||||
if tk.Type != token.StringType {
|
||||
return nil
|
||||
}
|
||||
origin := p.removeLeftSideNewLineCharacter(tk.Origin)
|
||||
if p.existsNewLineCharacter(origin) {
|
||||
return errors.ErrSyntax("unexpected key name", tk)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// createNullToken builds a synthetic "null" token positioned one column to
// the right of base. It is used when a mapping entry has no explicit value.
func (p *parser) createNullToken(base *token.Token) *token.Token {
	// copy the position so the original token is not mutated
	pos := *(base.Position)
	pos.Column++
	return token.New("null", "null", &pos)
}
|
||||
|
||||
func (p *parser) parseMapValue(ctx *context, key ast.MapKeyNode, colonToken *token.Token) (ast.Node, error) {
|
||||
node, err := p.createMapValueNode(ctx, key, colonToken)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to create map value node")
|
||||
}
|
||||
if node != nil && node.GetPath() == "" {
|
||||
node.SetPath(ctx.path)
|
||||
}
|
||||
return node, nil
|
||||
}
|
||||
|
||||
func (p *parser) createMapValueNode(ctx *context, key ast.MapKeyNode, colonToken *token.Token) (ast.Node, error) {
|
||||
tk := ctx.currentToken()
|
||||
if tk == nil {
|
||||
nullToken := p.createNullToken(colonToken)
|
||||
ctx.insertToken(ctx.idx, nullToken)
|
||||
return ast.Null(nullToken), nil
|
||||
}
|
||||
|
||||
if tk.Position.Column == key.GetToken().Position.Column && tk.Type == token.StringType {
|
||||
// in this case,
|
||||
// ----
|
||||
// key: <value does not defined>
|
||||
// next
|
||||
nullToken := p.createNullToken(colonToken)
|
||||
ctx.insertToken(ctx.idx, nullToken)
|
||||
return ast.Null(nullToken), nil
|
||||
}
|
||||
|
||||
if tk.Position.Column < key.GetToken().Position.Column {
|
||||
// in this case,
|
||||
// ----
|
||||
// key: <value does not defined>
|
||||
// next
|
||||
nullToken := p.createNullToken(colonToken)
|
||||
ctx.insertToken(ctx.idx, nullToken)
|
||||
return ast.Null(nullToken), nil
|
||||
}
|
||||
|
||||
value, err := p.parseToken(ctx, ctx.currentToken())
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse mapping 'value' node")
|
||||
}
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// validateMapValue rejects a string value that sits at the same column as
// its key without being followed by ':' or '-' — such a line is a stray
// sibling, not a value.
func (p *parser) validateMapValue(ctx *context, key, value ast.Node) error {
	keyColumn := key.GetToken().Position.Column
	valueColumn := value.GetToken().Position.Column
	// only the same-column string case is ambiguous enough to check
	if keyColumn != valueColumn {
		return nil
	}
	if value.Type() != ast.StringType {
		return nil
	}
	ntk := ctx.nextToken()
	if ntk == nil || (ntk.Type != token.MappingValueType && ntk.Type != token.SequenceEntryType) {
		return errors.ErrSyntax("could not found expected ':' token", value.GetToken())
	}
	return nil
}
|
||||
|
||||
// parseMappingValue parses one block-mapping entry ("key: value") and then
// greedily collects any following sibling entries at the same column into a
// single MappingNode. If only one entry is found, the bare MappingValueNode
// is returned instead. Trailing comments at the same or deeper column are
// attached as a footer comment.
func (p *parser) parseMappingValue(ctx *context) (ast.Node, error) {
	key, err := p.parseMapKey(ctx)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse map key")
	}
	keyText := key.GetToken().Value
	key.SetPath(ctx.withChild(keyText).path)
	if err := p.validateMapKey(key.GetToken()); err != nil {
		return nil, errors.Wrapf(err, "validate mapping key error")
	}
	ctx.progress(1)          // progress to mapping value token
	tk := ctx.currentToken() // get mapping value token
	if tk == nil {
		return nil, errors.ErrSyntax("unexpected map", key.GetToken())
	}
	ctx.progress(1) // progress to value token
	if err := p.setSameLineCommentIfExists(ctx.withChild(keyText), key); err != nil {
		return nil, errors.Wrapf(err, "failed to set same line comment to node")
	}
	if key.GetComment() != nil {
		// if current token is comment, GetComment() is not nil.
		// then progress to value token
		ctx.progressIgnoreComment(1)
	}

	value, err := p.parseMapValue(ctx.withChild(keyText), key, tk)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse map value")
	}
	if err := p.validateMapValue(ctx, key, value); err != nil {
		return nil, errors.Wrapf(err, "failed to validate map value")
	}

	mvnode := ast.MappingValue(tk, key, value)
	mvnode.SetPath(ctx.withChild(keyText).path)
	node := ast.Mapping(tk, false, mvnode)
	node.SetPath(ctx.withChild(keyText).path)

	// collect sibling "key: value" entries: the after-next token must be
	// ':' and the next key must sit at this key's column.
	ntk := ctx.nextNotCommentToken()
	antk := ctx.afterNextNotCommentToken()
	for antk != nil && antk.Type == token.MappingValueType &&
		ntk.Position.Column == key.GetToken().Position.Column {
		ctx.progressIgnoreComment(1)
		value, err := p.parseToken(ctx, ctx.currentToken())
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse mapping node")
		}
		switch value.Type() {
		case ast.MappingType:
			// a nested call already grouped further siblings; splice its
			// entries into this node, moving its comment to the first one.
			c := value.(*ast.MappingNode)
			comment := c.GetComment()
			for idx, v := range c.Values {
				if idx == 0 && comment != nil {
					if err := v.SetComment(comment); err != nil {
						return nil, errors.Wrapf(err, "failed to set comment token to node")
					}
				}
				node.Values = append(node.Values, v)
			}
		case ast.MappingValueType:
			node.Values = append(node.Values, value.(*ast.MappingValueNode))
		default:
			return nil, xerrors.Errorf("failed to parse mapping value node node is %s", value.Type())
		}
		ntk = ctx.nextNotCommentToken()
		antk = ctx.afterNextNotCommentToken()
	}
	if len(node.Values) == 1 {
		// single entry: return the MappingValueNode directly
		mapKeyCol := mvnode.Key.GetToken().Position.Column
		commentTk := ctx.nextToken()
		if commentTk != nil && commentTk.Type == token.CommentType && mapKeyCol <= commentTk.Position.Column {
			// If the comment is in the same or deeper column as the last element column in map value,
			// treat it as a footer comment for the last element.
			comment := p.parseFootComment(ctx, mapKeyCol)
			mvnode.FootComment = comment
		}
		return mvnode, nil
	}
	mapCol := node.GetToken().Position.Column
	commentTk := ctx.nextToken()
	if commentTk != nil && commentTk.Type == token.CommentType && mapCol <= commentTk.Position.Column {
		// If the comment is in the same or deeper column as the last element column in map value,
		// treat it as a footer comment for the last element.
		comment := p.parseFootComment(ctx, mapCol)
		node.FootComment = comment
	}
	return node, nil
}
|
||||
|
||||
// parseSequenceEntry parses a block-style sequence ("- a", "- b", ...)
// starting at the current '-' token. Consecutive entries at the same column
// are collected into one SequenceNode; head comments before an entry become
// per-value head comments, and trailing comments at the same or deeper
// column become the sequence's footer comment.
func (p *parser) parseSequenceEntry(ctx *context) (*ast.SequenceNode, error) {
	tk := ctx.currentToken()
	sequenceNode := ast.Sequence(tk, false)
	sequenceNode.SetPath(ctx.path)
	curColumn := tk.Position.Column
	for tk.Type == token.SequenceEntryType {
		ctx.progress(1) // skip sequence token
		tk = ctx.currentToken()
		if tk == nil {
			return nil, errors.ErrSyntax("empty sequence entry", ctx.previousToken())
		}
		var comment *ast.CommentGroupNode
		if tk.Type == token.CommentType {
			// comments between '-' tokens head the next entry
			comment = p.parseCommentOnly(ctx)
			tk = ctx.currentToken()
			if tk.Type != token.SequenceEntryType {
				break
			}
			ctx.progress(1) // skip sequence token
		}
		// each value is parsed under path "$...[<index>]"
		value, err := p.parseToken(ctx.withIndex(uint(len(sequenceNode.Values))), ctx.currentToken())
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse sequence")
		}
		if comment != nil {
			comment.SetPath(ctx.withIndex(uint(len(sequenceNode.Values))).path)
			sequenceNode.ValueHeadComments = append(sequenceNode.ValueHeadComments, comment)
		} else {
			// keep the comment slice aligned with the value slice
			sequenceNode.ValueHeadComments = append(sequenceNode.ValueHeadComments, nil)
		}
		sequenceNode.Values = append(sequenceNode.Values, value)
		tk = ctx.nextNotCommentToken()
		if tk == nil {
			break
		}
		if tk.Type != token.SequenceEntryType {
			break
		}
		if tk.Position.Column != curColumn {
			// a '-' at a different column belongs to another sequence
			break
		}
		ctx.progressIgnoreComment(1)
	}
	commentTk := ctx.nextToken()
	if commentTk != nil && commentTk.Type == token.CommentType && curColumn <= commentTk.Position.Column {
		// If the comment is in the same or deeper column as the last element column in sequence value,
		// treat it as a footer comment for the last element.
		comment := p.parseFootComment(ctx, curColumn)
		sequenceNode.FootComment = comment
	}
	return sequenceNode, nil
}
|
||||
|
||||
func (p *parser) parseAnchor(ctx *context) (*ast.AnchorNode, error) {
|
||||
tk := ctx.currentToken()
|
||||
anchor := ast.Anchor(tk)
|
||||
anchor.SetPath(ctx.path)
|
||||
ntk := ctx.nextToken()
|
||||
if ntk == nil {
|
||||
return nil, errors.ErrSyntax("unexpected anchor. anchor name is undefined", tk)
|
||||
}
|
||||
ctx.progress(1) // skip anchor token
|
||||
name, err := p.parseToken(ctx, ctx.currentToken())
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parser anchor name node")
|
||||
}
|
||||
anchor.Name = name
|
||||
ntk = ctx.nextToken()
|
||||
if ntk == nil {
|
||||
return nil, errors.ErrSyntax("unexpected anchor. anchor value is undefined", ctx.currentToken())
|
||||
}
|
||||
ctx.progress(1)
|
||||
value, err := p.parseToken(ctx, ctx.currentToken())
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parser anchor name node")
|
||||
}
|
||||
anchor.Value = value
|
||||
return anchor, nil
|
||||
}
|
||||
|
||||
func (p *parser) parseAlias(ctx *context) (*ast.AliasNode, error) {
|
||||
tk := ctx.currentToken()
|
||||
alias := ast.Alias(tk)
|
||||
alias.SetPath(ctx.path)
|
||||
ntk := ctx.nextToken()
|
||||
if ntk == nil {
|
||||
return nil, errors.ErrSyntax("unexpected alias. alias name is undefined", tk)
|
||||
}
|
||||
ctx.progress(1) // skip alias token
|
||||
name, err := p.parseToken(ctx, ctx.currentToken())
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parser alias name node")
|
||||
}
|
||||
alias.Value = name
|
||||
return alias, nil
|
||||
}
|
||||
|
||||
func (p *parser) parseMapKey(ctx *context) (ast.MapKeyNode, error) {
|
||||
tk := ctx.currentToken()
|
||||
if value := p.parseScalarValue(tk); value != nil {
|
||||
return value, nil
|
||||
}
|
||||
switch tk.Type {
|
||||
case token.MergeKeyType:
|
||||
return ast.MergeKey(tk), nil
|
||||
case token.MappingKeyType:
|
||||
return p.parseMappingKey(ctx)
|
||||
}
|
||||
return nil, errors.ErrSyntax("unexpected mapping key", tk)
|
||||
}
|
||||
|
||||
func (p *parser) parseStringValue(tk *token.Token) *ast.StringNode {
|
||||
switch tk.Type {
|
||||
case token.StringType,
|
||||
token.SingleQuoteType,
|
||||
token.DoubleQuoteType:
|
||||
return ast.String(tk)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// parseScalarValueWithComment parses a scalar token and, when the following
// token is a comment on the same line, attaches it to the node.
// Returns (nil, nil) when tk is not a scalar.
func (p *parser) parseScalarValueWithComment(ctx *context, tk *token.Token) (ast.ScalarNode, error) {
	node := p.parseScalarValue(tk)
	if node == nil {
		return nil, nil
	}
	node.SetPath(ctx.path)
	if p.isSameLineComment(ctx.nextToken(), node) {
		ctx.progress(1)
		if err := p.setSameLineCommentIfExists(ctx, node); err != nil {
			return nil, errors.Wrapf(err, "failed to set same line comment to node")
		}
	}
	return node, nil
}
|
||||
|
||||
// parseScalarValue maps a scalar token (string, null, bool, integer in any
// base, float, infinity, NaN) to its AST node. Returns nil for non-scalar
// tokens so the caller can fall through to compound-node parsing.
func (p *parser) parseScalarValue(tk *token.Token) ast.ScalarNode {
	if node := p.parseStringValue(tk); node != nil {
		return node
	}
	switch tk.Type {
	case token.NullType:
		return ast.Null(tk)
	case token.BoolType:
		return ast.Bool(tk)
	case token.IntegerType,
		token.BinaryIntegerType,
		token.OctetIntegerType,
		token.HexIntegerType:
		return ast.Integer(tk)
	case token.FloatType:
		return ast.Float(tk)
	case token.InfinityType:
		return ast.Infinity(tk)
	case token.NanType:
		return ast.Nan(tk)
	}
	return nil
}
|
||||
|
||||
// parseDirective parses a "%DIRECTIVE value" line and requires it to be
// followed by a document header ("---"); otherwise a syntax error is
// returned.
func (p *parser) parseDirective(ctx *context) (*ast.DirectiveNode, error) {
	node := ast.Directive(ctx.currentToken())
	ctx.progress(1) // skip directive token
	value, err := p.parseToken(ctx, ctx.currentToken())
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse directive value")
	}
	node.Value = value
	ctx.progress(1)
	tk := ctx.currentToken()
	if tk == nil {
		// Since current token is nil, use the previous token to specify
		// the syntax error location.
		return nil, errors.ErrSyntax("unexpected directive value. document not started", ctx.previousToken())
	}
	if tk.Type != token.DocumentHeaderType {
		return nil, errors.ErrSyntax("unexpected directive value. document not started", ctx.currentToken())
	}
	return node, nil
}
|
||||
|
||||
// parseLiteral parses a block scalar ( "|" literal or ">" folded ) header
// plus its string body. A comment directly after the header is attached to
// the literal node.
func (p *parser) parseLiteral(ctx *context) (*ast.LiteralNode, error) {
	node := ast.Literal(ctx.currentToken())
	ctx.progress(1) // skip literal/folded token

	tk := ctx.currentToken()
	var comment *ast.CommentGroupNode
	if tk.Type == token.CommentType {
		comment = p.parseCommentOnly(ctx)
		comment.SetPath(ctx.path)
		if err := node.SetComment(comment); err != nil {
			return nil, errors.Wrapf(err, "failed to set comment to literal")
		}
		tk = ctx.currentToken()
	}
	value, err := p.parseToken(ctx, tk)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse literal/folded value")
	}
	// the body of a block scalar must lex to a single string token
	snode, ok := value.(*ast.StringNode)
	if !ok {
		return nil, errors.ErrSyntax("unexpected token. required string token", value.GetToken())
	}
	node.Value = snode
	return node, nil
}
|
||||
|
||||
func (p *parser) isSameLineComment(tk *token.Token, node ast.Node) bool {
|
||||
if tk == nil {
|
||||
return false
|
||||
}
|
||||
if tk.Type != token.CommentType {
|
||||
return false
|
||||
}
|
||||
return tk.Position.Line == node.GetToken().Position.Line
|
||||
}
|
||||
|
||||
// setSameLineCommentIfExists attaches the current token to node as a
// trailing comment when it is a comment on node's line; otherwise it is a
// no-op.
func (p *parser) setSameLineCommentIfExists(ctx *context, node ast.Node) error {
	tk := ctx.currentToken()
	if !p.isSameLineComment(tk, node) {
		return nil
	}
	comment := ast.CommentGroup([]*token.Token{tk})
	comment.SetPath(ctx.path)
	if err := node.SetComment(comment); err != nil {
		return errors.Wrapf(err, "failed to set comment token to ast.Node")
	}
	return nil
}
|
||||
|
||||
// parseDocument parses a document starting at a "---" header token,
// consuming an optional trailing "..." document-end token.
func (p *parser) parseDocument(ctx *context) (*ast.DocumentNode, error) {
	startTk := ctx.currentToken()
	ctx.progress(1) // skip document header token
	body, err := p.parseToken(ctx, ctx.currentToken())
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse document body")
	}
	node := ast.Document(startTk, body)
	if ntk := ctx.nextToken(); ntk != nil && ntk.Type == token.DocumentEndType {
		node.End = ntk
		ctx.progress(1)
	}
	return node, nil
}
|
||||
|
||||
func (p *parser) parseCommentOnly(ctx *context) *ast.CommentGroupNode {
|
||||
commentTokens := []*token.Token{}
|
||||
for {
|
||||
tk := ctx.currentToken()
|
||||
if tk == nil {
|
||||
break
|
||||
}
|
||||
if tk.Type != token.CommentType {
|
||||
break
|
||||
}
|
||||
commentTokens = append(commentTokens, tk)
|
||||
ctx.progressIgnoreComment(1) // skip comment token
|
||||
}
|
||||
return ast.CommentGroup(commentTokens)
|
||||
}
|
||||
|
||||
// parseFootComment collects the comment tokens following the current
// position that belong to a footer: consecutive comments indented at least
// to column col. The caller must have verified the next token is a comment.
func (p *parser) parseFootComment(ctx *context, col int) *ast.CommentGroupNode {
	commentTokens := []*token.Token{}
	for {
		// step onto the comment token and record it
		ctx.progressIgnoreComment(1)
		commentTokens = append(commentTokens, ctx.currentToken())

		nextTk := ctx.nextToken()
		if nextTk == nil {
			break
		}
		if nextTk.Type != token.CommentType {
			break
		}
		if col > nextTk.Position.Column {
			// a shallower comment no longer belongs to this footer
			break
		}
	}
	return ast.CommentGroup(commentTokens)
}
|
||||
|
||||
// parseComment consumes leading comments and parses the node that follows,
// attaching the comments to it. When nothing follows, the comment group
// itself is returned as the node.
func (p *parser) parseComment(ctx *context) (ast.Node, error) {
	group := p.parseCommentOnly(ctx)
	node, err := p.parseToken(ctx, ctx.currentToken())
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse node after comment")
	}
	if node == nil {
		return group, nil
	}
	group.SetPath(node.GetPath())
	if err := node.SetComment(group); err != nil {
		return nil, errors.Wrapf(err, "failed to set comment token to node")
	}
	return node, nil
}
|
||||
|
||||
// parseMappingKey parses an explicit mapping key ( "? key" ) and the key
// expression that follows it.
func (p *parser) parseMappingKey(ctx *context) (*ast.MappingKeyNode, error) {
	keyTk := ctx.currentToken()
	node := ast.MappingKey(keyTk)
	node.SetPath(ctx.path)
	ctx.progress(1) // skip mapping key token
	value, err := p.parseToken(ctx.withChild(keyTk.Value), ctx.currentToken())
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse map key")
	}
	node.Value = value
	return node, nil
}
|
||||
|
||||
func (p *parser) parseToken(ctx *context, tk *token.Token) (ast.Node, error) {
|
||||
node, err := p.createNodeFromToken(ctx, tk)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to create node from token")
|
||||
}
|
||||
if node != nil && node.GetPath() == "" {
|
||||
node.SetPath(ctx.path)
|
||||
}
|
||||
return node, nil
|
||||
}
|
||||
|
||||
// createNodeFromToken is the central dispatch: it decides which construct tk
// begins and delegates to the matching parse routine. Returns (nil, nil)
// for a nil token or a token type with no handler.
func (p *parser) createNodeFromToken(ctx *context, tk *token.Token) (ast.Node, error) {
	if tk == nil {
		return nil, nil
	}
	// a token directly followed by ':' starts a block mapping entry,
	// whatever its own type is
	if tk.NextType() == token.MappingValueType {
		node, err := p.parseMappingValue(ctx)
		return node, err
	}
	node, err := p.parseScalarValueWithComment(ctx, tk)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse scalar value")
	}
	if node != nil {
		return node, nil
	}
	switch tk.Type {
	case token.CommentType:
		return p.parseComment(ctx)
	case token.MappingKeyType:
		return p.parseMappingKey(ctx)
	case token.DocumentHeaderType:
		return p.parseDocument(ctx)
	case token.MappingStartType:
		return p.parseMapping(ctx)
	case token.SequenceStartType:
		return p.parseSequence(ctx)
	case token.SequenceEntryType:
		return p.parseSequenceEntry(ctx)
	case token.AnchorType:
		return p.parseAnchor(ctx)
	case token.AliasType:
		return p.parseAlias(ctx)
	case token.DirectiveType:
		return p.parseDirective(ctx)
	case token.TagType:
		return p.parseTag(ctx)
	case token.LiteralType, token.FoldedType:
		return p.parseLiteral(ctx)
	}
	return nil, nil
}
|
||||
|
||||
// parse converts the token stream into an ast.File. Each top-level node
// becomes its own document; nodes not already wrapped in a DocumentNode are
// wrapped here.
func (p *parser) parse(tokens token.Tokens, mode Mode) (*ast.File, error) {
	ctx := newContext(tokens, mode)
	file := &ast.File{Docs: []*ast.DocumentNode{}}
	for ctx.next() {
		node, err := p.parseToken(ctx, ctx.currentToken())
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse")
		}
		ctx.progressIgnoreComment(1)
		if node == nil {
			continue
		}
		if doc, ok := node.(*ast.DocumentNode); ok {
			file.Docs = append(file.Docs, doc)
		} else {
			// node had no explicit "---" header; wrap it in a document
			file.Docs = append(file.Docs, ast.Document(nil, node))
		}
	}
	return file, nil
}
|
||||
|
||||
// Mode is a bit set of flags controlling optional parser behaviour.
type Mode uint

const (
	ParseComments Mode = 1 << iota // parse comments and add them to AST
)
|
||||
|
||||
// ParseBytes parse from byte slice, and returns ast.File
|
||||
func ParseBytes(bytes []byte, mode Mode) (*ast.File, error) {
|
||||
tokens := lexer.Tokenize(string(bytes))
|
||||
f, err := Parse(tokens, mode)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse")
|
||||
}
|
||||
return f, nil
|
||||
}
|
||||
|
||||
// Parse parse from token instances, and returns ast.File
|
||||
func Parse(tokens token.Tokens, mode Mode) (*ast.File, error) {
|
||||
var p parser
|
||||
f, err := p.parse(tokens, mode)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse")
|
||||
}
|
||||
return f, nil
|
||||
}
|
||||
|
||||
// ParseFile parse from filename, and returns ast.File.
// (comment fixed: it previously started with "Parse", not "ParseFile")
func ParseFile(filename string, mode Mode) (*ast.File, error) {
	file, err := ioutil.ReadFile(filename)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to read file: %s", filename)
	}
	f, err := ParseBytes(file, mode)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse")
	}
	// record the origin file on the AST for error reporting / printing
	f.Name = filename
	return f, nil
}
|
@ -0,0 +1,794 @@
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/goccy/go-yaml/ast"
|
||||
"github.com/goccy/go-yaml/internal/errors"
|
||||
"github.com/goccy/go-yaml/parser"
|
||||
"github.com/goccy/go-yaml/printer"
|
||||
)
|
||||
|
||||
// PathString create Path from string
|
||||
//
|
||||
// YAMLPath rule
|
||||
// $ : the root object/element
|
||||
// . : child operator
|
||||
// .. : recursive descent
|
||||
// [num] : object/element of array by number
|
||||
// [*] : all objects/elements for array.
|
||||
//
|
||||
// If you want to use reserved characters such as `.` and `*` as a key name,
|
||||
// enclose them in single quotation as follows ( $.foo.'bar.baz-*'.hoge ).
|
||||
// If you want to use a single quote with reserved characters, escape it with `\` ( $.foo.'bar.baz\'s value'.hoge ).
|
||||
func PathString(s string) (*Path, error) {
|
||||
buf := []rune(s)
|
||||
length := len(buf)
|
||||
cursor := 0
|
||||
builder := &PathBuilder{}
|
||||
for cursor < length {
|
||||
c := buf[cursor]
|
||||
switch c {
|
||||
case '$':
|
||||
builder = builder.Root()
|
||||
cursor++
|
||||
case '.':
|
||||
b, buf, c, err := parsePathDot(builder, buf, cursor)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse path of dot")
|
||||
}
|
||||
length = len(buf)
|
||||
builder = b
|
||||
cursor = c
|
||||
case '[':
|
||||
b, buf, c, err := parsePathIndex(builder, buf, cursor)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse path of index")
|
||||
}
|
||||
length = len(buf)
|
||||
builder = b
|
||||
cursor = c
|
||||
default:
|
||||
return nil, errors.Wrapf(ErrInvalidPathString, "invalid path at %d", cursor)
|
||||
}
|
||||
}
|
||||
return builder.Build(), nil
|
||||
}
|
||||
|
||||
// parsePathRecursive parses a recursive-descent selector ( "..name" )
// starting at the first '.' of the ".." located at buf[cursor]. It returns
// the updated builder, the buffer, and the cursor position after the
// selector name.
func parsePathRecursive(b *PathBuilder, buf []rune, cursor int) (*PathBuilder, []rune, int, error) {
	length := len(buf)
	cursor += 2 // skip .. characters
	start := cursor
	// scan the selector name until a terminator or an illegal character
	for ; cursor < length; cursor++ {
		c := buf[cursor]
		switch c {
		case '$':
			return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '$' after '..' character")
		case '*':
			return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '*' after '..' character")
		case '.', '[':
			goto end
		case ']':
			return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified ']' after '..' character")
		}
	}
end:
	if start == cursor {
		// ".." immediately followed by a terminator has no selector name
		return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "not found recursive selector")
	}
	return b.Recursive(string(buf[start:cursor])), buf, cursor, nil
}
|
||||
|
||||
func parsePathDot(b *PathBuilder, buf []rune, cursor int) (*PathBuilder, []rune, int, error) {
|
||||
length := len(buf)
|
||||
if cursor+1 < length && buf[cursor+1] == '.' {
|
||||
b, buf, c, err := parsePathRecursive(b, buf, cursor)
|
||||
if err != nil {
|
||||
return nil, nil, 0, errors.Wrapf(err, "failed to parse path of recursive")
|
||||
}
|
||||
return b, buf, c, nil
|
||||
}
|
||||
cursor++ // skip . character
|
||||
start := cursor
|
||||
|
||||
// if started single quote, looking for end single quote char
|
||||
if cursor < length && buf[cursor] == '\'' {
|
||||
return parseQuotedKey(b, buf, cursor)
|
||||
}
|
||||
for ; cursor < length; cursor++ {
|
||||
c := buf[cursor]
|
||||
switch c {
|
||||
case '$':
|
||||
return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '$' after '.' character")
|
||||
case '*':
|
||||
return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '*' after '.' character")
|
||||
case '.', '[':
|
||||
goto end
|
||||
case ']':
|
||||
return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified ']' after '.' character")
|
||||
}
|
||||
}
|
||||
end:
|
||||
if start == cursor {
|
||||
return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "cloud not find by empty key")
|
||||
}
|
||||
return b.child(string(buf[start:cursor])), buf, cursor, nil
|
||||
}
|
||||
|
||||
// parseQuotedKey parses a single-quoted child selector ( "'a.b'" ) starting
// at the opening quote located at buf[cursor]. Backslash escapes are
// removed by rewriting buf in place, so the returned buffer and cursor must
// replace the caller's copies.
func parseQuotedKey(b *PathBuilder, buf []rune, cursor int) (*PathBuilder, []rune, int, error) {
	cursor++ // skip single quote
	start := cursor
	length := len(buf)
	var foundEndDelim bool
	for ; cursor < length; cursor++ {
		switch buf[cursor] {
		case '\\':
			// drop the backslash; the loop increment then steps over the
			// escaped character so it is taken literally
			buf = append(append([]rune{}, buf[:cursor]...), buf[cursor+1:]...)
			length = len(buf)
		case '\'':
			foundEndDelim = true
			goto end
		}
	}
end:
	if !foundEndDelim {
		return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "could not find end delimiter for key")
	}
	if start == cursor {
		return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "could not find by empty key")
	}
	selector := buf[start:cursor]
	cursor++
	// the character right after the closing quote must be a valid
	// continuation ('.', '[' or end of path)
	if cursor < length {
		switch buf[cursor] {
		case '$':
			return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '$' after '.' character")
		case '*':
			return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '*' after '.' character")
		case ']':
			return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified ']' after '.' character")
		}
	}
	return b.child(string(selector)), buf, cursor, nil
}
|
||||
|
||||
func parsePathIndex(b *PathBuilder, buf []rune, cursor int) (*PathBuilder, []rune, int, error) {
|
||||
length := len(buf)
|
||||
cursor++ // skip '[' character
|
||||
if length <= cursor {
|
||||
return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "unexpected end of YAML Path")
|
||||
}
|
||||
c := buf[cursor]
|
||||
switch c {
|
||||
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '*':
|
||||
start := cursor
|
||||
cursor++
|
||||
for ; cursor < length; cursor++ {
|
||||
c := buf[cursor]
|
||||
switch c {
|
||||
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
continue
|
||||
}
|
||||
break
|
||||
}
|
||||
if buf[cursor] != ']' {
|
||||
return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "invalid character %s at %d", string(buf[cursor]), cursor)
|
||||
}
|
||||
numOrAll := string(buf[start:cursor])
|
||||
if numOrAll == "*" {
|
||||
return b.IndexAll(), buf, cursor + 1, nil
|
||||
}
|
||||
num, err := strconv.ParseInt(numOrAll, 10, 64)
|
||||
if err != nil {
|
||||
return nil, nil, 0, errors.Wrapf(err, "failed to parse number")
|
||||
}
|
||||
return b.Index(uint(num)), buf, cursor + 1, nil
|
||||
}
|
||||
return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "invalid character %s at %d", c, cursor)
|
||||
}
|
||||
|
||||
// Path represent YAMLPath ( like a JSONPath ).
type Path struct {
	node pathNode // head of the compiled selector chain built by PathBuilder
}
|
||||
|
||||
// String path to text, i.e. the textual YAMLPath form of this Path.
func (p *Path) String() string {
	return p.node.String()
}
|
||||
|
||||
// Read decode from r and set extracted value by YAMLPath to v.
// The matched node is re-serialized and unmarshaled into v.
func (p *Path) Read(r io.Reader, v interface{}) error {
	node, err := p.ReadNode(r)
	if err != nil {
		return errors.Wrapf(err, "failed to read node")
	}
	if err := Unmarshal([]byte(node.String()), v); err != nil {
		return errors.Wrapf(err, "failed to unmarshal")
	}
	return nil
}
|
||||
|
||||
// ReadNode create AST from r and extract node by YAMLPath.
// Returns ErrInvalidPath when the path was never compiled.
func (p *Path) ReadNode(r io.Reader) (ast.Node, error) {
	if p.node == nil {
		return nil, ErrInvalidPath
	}
	var buf bytes.Buffer
	if _, err := io.Copy(&buf, r); err != nil {
		return nil, errors.Wrapf(err, "failed to copy from reader")
	}
	// mode 0: comments are not needed for path filtering
	f, err := parser.ParseBytes(buf.Bytes(), 0)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse yaml")
	}
	node, err := p.FilterFile(f)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to filter from ast.File")
	}
	return node, nil
}
|
||||
|
||||
// Filter filter from target by YAMLPath and set it to v.
|
||||
func (p *Path) Filter(target, v interface{}) error {
|
||||
b, err := Marshal(target)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "failed to marshal target value")
|
||||
}
|
||||
if err := p.Read(bytes.NewBuffer(b), v); err != nil {
|
||||
return errors.Wrapf(err, "failed to read")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// FilterFile filter from ast.File by YAMLPath.
|
||||
func (p *Path) FilterFile(f *ast.File) (ast.Node, error) {
|
||||
for _, doc := range f.Docs {
|
||||
node, err := p.FilterNode(doc.Body)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to filter node by path ( %s )", p.node)
|
||||
}
|
||||
if node != nil {
|
||||
return node, nil
|
||||
}
|
||||
}
|
||||
return nil, errors.Wrapf(ErrNotFoundNode, "failed to find path ( %s )", p.node)
|
||||
}
|
||||
|
||||
// FilterNode filter from node by YAMLPath.
|
||||
func (p *Path) FilterNode(node ast.Node) (ast.Node, error) {
|
||||
n, err := p.node.filter(node)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to filter node by path ( %s )", p.node)
|
||||
}
|
||||
return n, nil
|
||||
}
|
||||
|
||||
// MergeFromReader merge YAML text into ast.File.
|
||||
func (p *Path) MergeFromReader(dst *ast.File, src io.Reader) error {
|
||||
var buf bytes.Buffer
|
||||
if _, err := io.Copy(&buf, src); err != nil {
|
||||
return errors.Wrapf(err, "failed to copy from reader")
|
||||
}
|
||||
file, err := parser.ParseBytes(buf.Bytes(), 0)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "failed to parse")
|
||||
}
|
||||
if err := p.MergeFromFile(dst, file); err != nil {
|
||||
return errors.Wrapf(err, "failed to merge file")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// MergeFromFile merge ast.File into ast.File.
|
||||
func (p *Path) MergeFromFile(dst *ast.File, src *ast.File) error {
|
||||
base, err := p.FilterFile(dst)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "failed to filter file")
|
||||
}
|
||||
for _, doc := range src.Docs {
|
||||
if err := ast.Merge(base, doc); err != nil {
|
||||
return errors.Wrapf(err, "failed to merge")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// MergeFromNode merge ast.Node into ast.File.
|
||||
func (p *Path) MergeFromNode(dst *ast.File, src ast.Node) error {
|
||||
base, err := p.FilterFile(dst)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "failed to filter file")
|
||||
}
|
||||
if err := ast.Merge(base, src); err != nil {
|
||||
return errors.Wrapf(err, "failed to merge")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ReplaceWithReader replace ast.File with io.Reader.
|
||||
func (p *Path) ReplaceWithReader(dst *ast.File, src io.Reader) error {
|
||||
var buf bytes.Buffer
|
||||
if _, err := io.Copy(&buf, src); err != nil {
|
||||
return errors.Wrapf(err, "failed to copy from reader")
|
||||
}
|
||||
file, err := parser.ParseBytes(buf.Bytes(), 0)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "failed to parse")
|
||||
}
|
||||
if err := p.ReplaceWithFile(dst, file); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace file")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ReplaceWithFile replace ast.File with ast.File.
|
||||
func (p *Path) ReplaceWithFile(dst *ast.File, src *ast.File) error {
|
||||
for _, doc := range src.Docs {
|
||||
if err := p.ReplaceWithNode(dst, doc); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace file by path ( %s )", p.node)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ReplaceNode replace ast.File with ast.Node.
|
||||
func (p *Path) ReplaceWithNode(dst *ast.File, node ast.Node) error {
|
||||
for _, doc := range dst.Docs {
|
||||
if node.Type() == ast.DocumentType {
|
||||
node = node.(*ast.DocumentNode).Body
|
||||
}
|
||||
if err := p.node.replace(doc.Body, node); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace node by path ( %s )", p.node)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// AnnotateSource add annotation to passed source ( see section 5.1 in README.md ).
|
||||
func (p *Path) AnnotateSource(source []byte, colored bool) ([]byte, error) {
|
||||
file, err := parser.ParseBytes([]byte(source), 0)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
node, err := p.FilterFile(file)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var pp printer.Printer
|
||||
return []byte(pp.PrintErrorToken(node.GetToken(), colored)), nil
|
||||
}
|
||||
|
||||
// PathBuilder represent builder for YAMLPath.
|
||||
type PathBuilder struct {
|
||||
root *rootNode
|
||||
node pathNode
|
||||
}
|
||||
|
||||
// Root add '$' to current path.
|
||||
func (b *PathBuilder) Root() *PathBuilder {
|
||||
root := newRootNode()
|
||||
return &PathBuilder{root: root, node: root}
|
||||
}
|
||||
|
||||
// IndexAll add '[*]' to current path.
|
||||
func (b *PathBuilder) IndexAll() *PathBuilder {
|
||||
b.node = b.node.chain(newIndexAllNode())
|
||||
return b
|
||||
}
|
||||
|
||||
// Recursive add '..selector' to current path.
|
||||
func (b *PathBuilder) Recursive(selector string) *PathBuilder {
|
||||
b.node = b.node.chain(newRecursiveNode(selector))
|
||||
return b
|
||||
}
|
||||
|
||||
func (b *PathBuilder) containsReservedPathCharacters(path string) bool {
|
||||
if strings.Contains(path, ".") {
|
||||
return true
|
||||
}
|
||||
if strings.Contains(path, "*") {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (b *PathBuilder) enclosedSingleQuote(name string) bool {
|
||||
return strings.HasPrefix(name, "'") && strings.HasSuffix(name, "'")
|
||||
}
|
||||
|
||||
func (b *PathBuilder) normalizeSelectorName(name string) string {
|
||||
if b.enclosedSingleQuote(name) {
|
||||
// already escaped name
|
||||
return name
|
||||
}
|
||||
if b.containsReservedPathCharacters(name) {
|
||||
escapedName := strings.ReplaceAll(name, `'`, `\'`)
|
||||
return "'" + escapedName + "'"
|
||||
}
|
||||
return name
|
||||
}
|
||||
|
||||
func (b *PathBuilder) child(name string) *PathBuilder {
|
||||
b.node = b.node.chain(newSelectorNode(name))
|
||||
return b
|
||||
}
|
||||
|
||||
// Child add '.name' to current path.
|
||||
func (b *PathBuilder) Child(name string) *PathBuilder {
|
||||
return b.child(b.normalizeSelectorName(name))
|
||||
}
|
||||
|
||||
// Index add '[idx]' to current path.
|
||||
func (b *PathBuilder) Index(idx uint) *PathBuilder {
|
||||
b.node = b.node.chain(newIndexNode(idx))
|
||||
return b
|
||||
}
|
||||
|
||||
// Build build YAMLPath.
|
||||
func (b *PathBuilder) Build() *Path {
|
||||
return &Path{node: b.root}
|
||||
}
|
||||
|
||||
type pathNode interface {
|
||||
fmt.Stringer
|
||||
chain(pathNode) pathNode
|
||||
filter(ast.Node) (ast.Node, error)
|
||||
replace(ast.Node, ast.Node) error
|
||||
}
|
||||
|
||||
type basePathNode struct {
|
||||
child pathNode
|
||||
}
|
||||
|
||||
func (n *basePathNode) chain(node pathNode) pathNode {
|
||||
n.child = node
|
||||
return node
|
||||
}
|
||||
|
||||
type rootNode struct {
|
||||
*basePathNode
|
||||
}
|
||||
|
||||
func newRootNode() *rootNode {
|
||||
return &rootNode{basePathNode: &basePathNode{}}
|
||||
}
|
||||
|
||||
func (n *rootNode) String() string {
|
||||
s := "$"
|
||||
if n.child != nil {
|
||||
s += n.child.String()
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func (n *rootNode) filter(node ast.Node) (ast.Node, error) {
|
||||
if n.child == nil {
|
||||
return nil, nil
|
||||
}
|
||||
filtered, err := n.child.filter(node)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to filter")
|
||||
}
|
||||
return filtered, nil
|
||||
}
|
||||
|
||||
func (n *rootNode) replace(node ast.Node, target ast.Node) error {
|
||||
if n.child == nil {
|
||||
return nil
|
||||
}
|
||||
if err := n.child.replace(node, target); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type selectorNode struct {
|
||||
*basePathNode
|
||||
selector string
|
||||
}
|
||||
|
||||
func newSelectorNode(selector string) *selectorNode {
|
||||
return &selectorNode{
|
||||
basePathNode: &basePathNode{},
|
||||
selector: selector,
|
||||
}
|
||||
}
|
||||
|
||||
func (n *selectorNode) filter(node ast.Node) (ast.Node, error) {
|
||||
switch node.Type() {
|
||||
case ast.MappingType:
|
||||
for _, value := range node.(*ast.MappingNode).Values {
|
||||
key := value.Key.GetToken().Value
|
||||
if key == n.selector {
|
||||
if n.child == nil {
|
||||
return value.Value, nil
|
||||
}
|
||||
filtered, err := n.child.filter(value.Value)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to filter")
|
||||
}
|
||||
return filtered, nil
|
||||
}
|
||||
}
|
||||
case ast.MappingValueType:
|
||||
value := node.(*ast.MappingValueNode)
|
||||
key := value.Key.GetToken().Value
|
||||
if key == n.selector {
|
||||
if n.child == nil {
|
||||
return value.Value, nil
|
||||
}
|
||||
filtered, err := n.child.filter(value.Value)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to filter")
|
||||
}
|
||||
return filtered, nil
|
||||
}
|
||||
default:
|
||||
return nil, errors.Wrapf(ErrInvalidQuery, "expected node type is map or map value. but got %s", node.Type())
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (n *selectorNode) replaceMapValue(value *ast.MappingValueNode, target ast.Node) error {
|
||||
key := value.Key.GetToken().Value
|
||||
if key != n.selector {
|
||||
return nil
|
||||
}
|
||||
if n.child == nil {
|
||||
if err := value.Replace(target); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace")
|
||||
}
|
||||
} else {
|
||||
if err := n.child.replace(value.Value, target); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (n *selectorNode) replace(node ast.Node, target ast.Node) error {
|
||||
switch node.Type() {
|
||||
case ast.MappingType:
|
||||
for _, value := range node.(*ast.MappingNode).Values {
|
||||
if err := n.replaceMapValue(value, target); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace map value")
|
||||
}
|
||||
}
|
||||
case ast.MappingValueType:
|
||||
value := node.(*ast.MappingValueNode)
|
||||
if err := n.replaceMapValue(value, target); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace map value")
|
||||
}
|
||||
default:
|
||||
return errors.Wrapf(ErrInvalidQuery, "expected node type is map or map value. but got %s", node.Type())
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (n *selectorNode) String() string {
|
||||
s := fmt.Sprintf(".%s", n.selector)
|
||||
if n.child != nil {
|
||||
s += n.child.String()
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
type indexNode struct {
|
||||
*basePathNode
|
||||
selector uint
|
||||
}
|
||||
|
||||
func newIndexNode(selector uint) *indexNode {
|
||||
return &indexNode{
|
||||
basePathNode: &basePathNode{},
|
||||
selector: selector,
|
||||
}
|
||||
}
|
||||
|
||||
func (n *indexNode) filter(node ast.Node) (ast.Node, error) {
|
||||
if node.Type() != ast.SequenceType {
|
||||
return nil, errors.Wrapf(ErrInvalidQuery, "expected sequence type node. but got %s", node.Type())
|
||||
}
|
||||
sequence := node.(*ast.SequenceNode)
|
||||
if n.selector >= uint(len(sequence.Values)) {
|
||||
return nil, errors.Wrapf(ErrInvalidQuery, "expected index is %d. but got sequences has %d items", n.selector, sequence.Values)
|
||||
}
|
||||
value := sequence.Values[n.selector]
|
||||
if n.child == nil {
|
||||
return value, nil
|
||||
}
|
||||
filtered, err := n.child.filter(value)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to filter")
|
||||
}
|
||||
return filtered, nil
|
||||
}
|
||||
|
||||
func (n *indexNode) replace(node ast.Node, target ast.Node) error {
|
||||
if node.Type() != ast.SequenceType {
|
||||
return errors.Wrapf(ErrInvalidQuery, "expected sequence type node. but got %s", node.Type())
|
||||
}
|
||||
sequence := node.(*ast.SequenceNode)
|
||||
if n.selector >= uint(len(sequence.Values)) {
|
||||
return errors.Wrapf(ErrInvalidQuery, "expected index is %d. but got sequences has %d items", n.selector, sequence.Values)
|
||||
}
|
||||
if n.child == nil {
|
||||
if err := sequence.Replace(int(n.selector), target); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if err := n.child.replace(sequence.Values[n.selector], target); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (n *indexNode) String() string {
|
||||
s := fmt.Sprintf("[%d]", n.selector)
|
||||
if n.child != nil {
|
||||
s += n.child.String()
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
type indexAllNode struct {
|
||||
*basePathNode
|
||||
}
|
||||
|
||||
func newIndexAllNode() *indexAllNode {
|
||||
return &indexAllNode{
|
||||
basePathNode: &basePathNode{},
|
||||
}
|
||||
}
|
||||
|
||||
func (n *indexAllNode) String() string {
|
||||
s := "[*]"
|
||||
if n.child != nil {
|
||||
s += n.child.String()
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func (n *indexAllNode) filter(node ast.Node) (ast.Node, error) {
|
||||
if node.Type() != ast.SequenceType {
|
||||
return nil, errors.Wrapf(ErrInvalidQuery, "expected sequence type node. but got %s", node.Type())
|
||||
}
|
||||
sequence := node.(*ast.SequenceNode)
|
||||
if n.child == nil {
|
||||
return sequence, nil
|
||||
}
|
||||
out := *sequence
|
||||
out.Values = []ast.Node{}
|
||||
for _, value := range sequence.Values {
|
||||
filtered, err := n.child.filter(value)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to filter")
|
||||
}
|
||||
out.Values = append(out.Values, filtered)
|
||||
}
|
||||
return &out, nil
|
||||
}
|
||||
|
||||
func (n *indexAllNode) replace(node ast.Node, target ast.Node) error {
|
||||
if node.Type() != ast.SequenceType {
|
||||
return errors.Wrapf(ErrInvalidQuery, "expected sequence type node. but got %s", node.Type())
|
||||
}
|
||||
sequence := node.(*ast.SequenceNode)
|
||||
if n.child == nil {
|
||||
for idx := range sequence.Values {
|
||||
if err := sequence.Replace(idx, target); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
for _, value := range sequence.Values {
|
||||
if err := n.child.replace(value, target); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type recursiveNode struct {
|
||||
*basePathNode
|
||||
selector string
|
||||
}
|
||||
|
||||
func newRecursiveNode(selector string) *recursiveNode {
|
||||
return &recursiveNode{
|
||||
basePathNode: &basePathNode{},
|
||||
selector: selector,
|
||||
}
|
||||
}
|
||||
|
||||
func (n *recursiveNode) String() string {
|
||||
s := fmt.Sprintf("..%s", n.selector)
|
||||
if n.child != nil {
|
||||
s += n.child.String()
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func (n *recursiveNode) filterNode(node ast.Node) (*ast.SequenceNode, error) {
|
||||
sequence := &ast.SequenceNode{BaseNode: &ast.BaseNode{}}
|
||||
switch typedNode := node.(type) {
|
||||
case *ast.MappingNode:
|
||||
for _, value := range typedNode.Values {
|
||||
seq, err := n.filterNode(value)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to filter")
|
||||
}
|
||||
sequence.Values = append(sequence.Values, seq.Values...)
|
||||
}
|
||||
case *ast.MappingValueNode:
|
||||
key := typedNode.Key.GetToken().Value
|
||||
if n.selector == key {
|
||||
sequence.Values = append(sequence.Values, typedNode.Value)
|
||||
}
|
||||
seq, err := n.filterNode(typedNode.Value)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to filter")
|
||||
}
|
||||
sequence.Values = append(sequence.Values, seq.Values...)
|
||||
case *ast.SequenceNode:
|
||||
for _, value := range typedNode.Values {
|
||||
seq, err := n.filterNode(value)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to filter")
|
||||
}
|
||||
sequence.Values = append(sequence.Values, seq.Values...)
|
||||
}
|
||||
}
|
||||
return sequence, nil
|
||||
}
|
||||
|
||||
func (n *recursiveNode) filter(node ast.Node) (ast.Node, error) {
|
||||
sequence, err := n.filterNode(node)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to filter")
|
||||
}
|
||||
sequence.Start = node.GetToken()
|
||||
return sequence, nil
|
||||
}
|
||||
|
||||
func (n *recursiveNode) replaceNode(node ast.Node, target ast.Node) error {
|
||||
switch typedNode := node.(type) {
|
||||
case *ast.MappingNode:
|
||||
for _, value := range typedNode.Values {
|
||||
if err := n.replaceNode(value, target); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace")
|
||||
}
|
||||
}
|
||||
case *ast.MappingValueNode:
|
||||
key := typedNode.Key.GetToken().Value
|
||||
if n.selector == key {
|
||||
if err := typedNode.Replace(target); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace")
|
||||
}
|
||||
}
|
||||
if err := n.replaceNode(typedNode.Value, target); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace")
|
||||
}
|
||||
case *ast.SequenceNode:
|
||||
for _, value := range typedNode.Values {
|
||||
if err := n.replaceNode(value, target); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace")
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (n *recursiveNode) replace(node ast.Node, target ast.Node) error {
|
||||
if err := n.replaceNode(node, target); err != nil {
|
||||
return errors.Wrapf(err, "failed to replace")
|
||||
}
|
||||
return nil
|
||||
}
|
@ -0,0 +1,352 @@
|
||||
package printer
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"strings"
|
||||
|
||||
"github.com/fatih/color"
|
||||
"github.com/goccy/go-yaml/ast"
|
||||
"github.com/goccy/go-yaml/token"
|
||||
)
|
||||
|
||||
// Property additional property set for each the token
|
||||
type Property struct {
|
||||
Prefix string
|
||||
Suffix string
|
||||
}
|
||||
|
||||
// PrintFunc returns property instance
|
||||
type PrintFunc func() *Property
|
||||
|
||||
// Printer create text from token collection or ast
|
||||
type Printer struct {
|
||||
LineNumber bool
|
||||
LineNumberFormat func(num int) string
|
||||
MapKey PrintFunc
|
||||
Anchor PrintFunc
|
||||
Alias PrintFunc
|
||||
Bool PrintFunc
|
||||
String PrintFunc
|
||||
Number PrintFunc
|
||||
}
|
||||
|
||||
func defaultLineNumberFormat(num int) string {
|
||||
return fmt.Sprintf("%2d | ", num)
|
||||
}
|
||||
|
||||
func (p *Printer) property(tk *token.Token) *Property {
|
||||
prop := &Property{}
|
||||
switch tk.PreviousType() {
|
||||
case token.AnchorType:
|
||||
if p.Anchor != nil {
|
||||
return p.Anchor()
|
||||
}
|
||||
return prop
|
||||
case token.AliasType:
|
||||
if p.Alias != nil {
|
||||
return p.Alias()
|
||||
}
|
||||
return prop
|
||||
}
|
||||
switch tk.NextType() {
|
||||
case token.MappingValueType:
|
||||
if p.MapKey != nil {
|
||||
return p.MapKey()
|
||||
}
|
||||
return prop
|
||||
}
|
||||
switch tk.Type {
|
||||
case token.BoolType:
|
||||
if p.Bool != nil {
|
||||
return p.Bool()
|
||||
}
|
||||
return prop
|
||||
case token.AnchorType:
|
||||
if p.Anchor != nil {
|
||||
return p.Anchor()
|
||||
}
|
||||
return prop
|
||||
case token.AliasType:
|
||||
if p.Anchor != nil {
|
||||
return p.Alias()
|
||||
}
|
||||
return prop
|
||||
case token.StringType, token.SingleQuoteType, token.DoubleQuoteType:
|
||||
if p.String != nil {
|
||||
return p.String()
|
||||
}
|
||||
return prop
|
||||
case token.IntegerType, token.FloatType:
|
||||
if p.Number != nil {
|
||||
return p.Number()
|
||||
}
|
||||
return prop
|
||||
default:
|
||||
}
|
||||
return prop
|
||||
}
|
||||
|
||||
// PrintTokens create text from token collection
|
||||
func (p *Printer) PrintTokens(tokens token.Tokens) string {
|
||||
if len(tokens) == 0 {
|
||||
return ""
|
||||
}
|
||||
if p.LineNumber {
|
||||
if p.LineNumberFormat == nil {
|
||||
p.LineNumberFormat = defaultLineNumberFormat
|
||||
}
|
||||
}
|
||||
texts := []string{}
|
||||
lineNumber := tokens[0].Position.Line
|
||||
for _, tk := range tokens {
|
||||
lines := strings.Split(tk.Origin, "\n")
|
||||
prop := p.property(tk)
|
||||
header := ""
|
||||
if p.LineNumber {
|
||||
header = p.LineNumberFormat(lineNumber)
|
||||
}
|
||||
if len(lines) == 1 {
|
||||
line := prop.Prefix + lines[0] + prop.Suffix
|
||||
if len(texts) == 0 {
|
||||
texts = append(texts, header+line)
|
||||
lineNumber++
|
||||
} else {
|
||||
text := texts[len(texts)-1]
|
||||
texts[len(texts)-1] = text + line
|
||||
}
|
||||
} else {
|
||||
for idx, src := range lines {
|
||||
if p.LineNumber {
|
||||
header = p.LineNumberFormat(lineNumber)
|
||||
}
|
||||
line := prop.Prefix + src + prop.Suffix
|
||||
if idx == 0 {
|
||||
if len(texts) == 0 {
|
||||
texts = append(texts, header+line)
|
||||
lineNumber++
|
||||
} else {
|
||||
text := texts[len(texts)-1]
|
||||
texts[len(texts)-1] = text + line
|
||||
}
|
||||
} else {
|
||||
texts = append(texts, fmt.Sprintf("%s%s", header, line))
|
||||
lineNumber++
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return strings.Join(texts, "\n")
|
||||
}
|
||||
|
||||
// PrintNode create text from ast.Node
|
||||
func (p *Printer) PrintNode(node ast.Node) []byte {
|
||||
return []byte(fmt.Sprintf("%+v\n", node))
|
||||
}
|
||||
|
||||
const escape = "\x1b"
|
||||
|
||||
func format(attr color.Attribute) string {
|
||||
return fmt.Sprintf("%s[%dm", escape, attr)
|
||||
}
|
||||
|
||||
func (p *Printer) setDefaultColorSet() {
|
||||
p.Bool = func() *Property {
|
||||
return &Property{
|
||||
Prefix: format(color.FgHiMagenta),
|
||||
Suffix: format(color.Reset),
|
||||
}
|
||||
}
|
||||
p.Number = func() *Property {
|
||||
return &Property{
|
||||
Prefix: format(color.FgHiMagenta),
|
||||
Suffix: format(color.Reset),
|
||||
}
|
||||
}
|
||||
p.MapKey = func() *Property {
|
||||
return &Property{
|
||||
Prefix: format(color.FgHiCyan),
|
||||
Suffix: format(color.Reset),
|
||||
}
|
||||
}
|
||||
p.Anchor = func() *Property {
|
||||
return &Property{
|
||||
Prefix: format(color.FgHiYellow),
|
||||
Suffix: format(color.Reset),
|
||||
}
|
||||
}
|
||||
p.Alias = func() *Property {
|
||||
return &Property{
|
||||
Prefix: format(color.FgHiYellow),
|
||||
Suffix: format(color.Reset),
|
||||
}
|
||||
}
|
||||
p.String = func() *Property {
|
||||
return &Property{
|
||||
Prefix: format(color.FgHiGreen),
|
||||
Suffix: format(color.Reset),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Printer) PrintErrorMessage(msg string, isColored bool) string {
|
||||
if isColored {
|
||||
return fmt.Sprintf("%s%s%s",
|
||||
format(color.FgHiRed),
|
||||
msg,
|
||||
format(color.Reset),
|
||||
)
|
||||
}
|
||||
return msg
|
||||
}
|
||||
|
||||
func (p *Printer) removeLeftSideNewLineChar(src string) string {
|
||||
return strings.TrimLeft(strings.TrimLeft(strings.TrimLeft(src, "\r"), "\n"), "\r\n")
|
||||
}
|
||||
|
||||
func (p *Printer) removeRightSideNewLineChar(src string) string {
|
||||
return strings.TrimRight(strings.TrimRight(strings.TrimRight(src, "\r"), "\n"), "\r\n")
|
||||
}
|
||||
|
||||
func (p *Printer) removeRightSideWhiteSpaceChar(src string) string {
|
||||
return p.removeRightSideNewLineChar(strings.TrimRight(src, " "))
|
||||
}
|
||||
|
||||
func (p *Printer) newLineCount(s string) int {
|
||||
src := []rune(s)
|
||||
size := len(src)
|
||||
cnt := 0
|
||||
for i := 0; i < size; i++ {
|
||||
c := src[i]
|
||||
switch c {
|
||||
case '\r':
|
||||
if i+1 < size && src[i+1] == '\n' {
|
||||
i++
|
||||
}
|
||||
cnt++
|
||||
case '\n':
|
||||
cnt++
|
||||
}
|
||||
}
|
||||
return cnt
|
||||
}
|
||||
|
||||
func (p *Printer) isNewLineLastChar(s string) bool {
|
||||
for i := len(s) - 1; i > 0; i-- {
|
||||
c := s[i]
|
||||
switch c {
|
||||
case ' ':
|
||||
continue
|
||||
case '\n', '\r':
|
||||
return true
|
||||
}
|
||||
break
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (p *Printer) printBeforeTokens(tk *token.Token, minLine, extLine int) token.Tokens {
|
||||
for {
|
||||
if tk.Prev == nil {
|
||||
break
|
||||
}
|
||||
if tk.Prev.Position.Line < minLine {
|
||||
break
|
||||
}
|
||||
tk = tk.Prev
|
||||
}
|
||||
minTk := tk.Clone()
|
||||
if minTk.Prev != nil {
|
||||
// add white spaces to minTk by prev token
|
||||
prev := minTk.Prev
|
||||
whiteSpaceLen := len(prev.Origin) - len(strings.TrimRight(prev.Origin, " "))
|
||||
minTk.Origin = strings.Repeat(" ", whiteSpaceLen) + minTk.Origin
|
||||
}
|
||||
minTk.Origin = p.removeLeftSideNewLineChar(minTk.Origin)
|
||||
tokens := token.Tokens{minTk}
|
||||
tk = minTk.Next
|
||||
for tk != nil && tk.Position.Line <= extLine {
|
||||
clonedTk := tk.Clone()
|
||||
tokens.Add(clonedTk)
|
||||
tk = clonedTk.Next
|
||||
}
|
||||
lastTk := tokens[len(tokens)-1]
|
||||
trimmedOrigin := p.removeRightSideWhiteSpaceChar(lastTk.Origin)
|
||||
suffix := lastTk.Origin[len(trimmedOrigin):]
|
||||
lastTk.Origin = trimmedOrigin
|
||||
|
||||
if lastTk.Next != nil && len(suffix) > 1 {
|
||||
next := lastTk.Next.Clone()
|
||||
// add suffix to header of next token
|
||||
if suffix[0] == '\n' || suffix[0] == '\r' {
|
||||
suffix = suffix[1:]
|
||||
}
|
||||
next.Origin = suffix + next.Origin
|
||||
lastTk.Next = next
|
||||
}
|
||||
return tokens
|
||||
}
|
||||
|
||||
func (p *Printer) printAfterTokens(tk *token.Token, maxLine int) token.Tokens {
|
||||
tokens := token.Tokens{}
|
||||
if tk == nil {
|
||||
return tokens
|
||||
}
|
||||
if tk.Position.Line > maxLine {
|
||||
return tokens
|
||||
}
|
||||
minTk := tk.Clone()
|
||||
minTk.Origin = p.removeLeftSideNewLineChar(minTk.Origin)
|
||||
tokens.Add(minTk)
|
||||
tk = minTk.Next
|
||||
for tk != nil && tk.Position.Line <= maxLine {
|
||||
clonedTk := tk.Clone()
|
||||
tokens.Add(clonedTk)
|
||||
tk = clonedTk.Next
|
||||
}
|
||||
return tokens
|
||||
}
|
||||
|
||||
func (p *Printer) setupErrorTokenFormat(annotateLine int, isColored bool) {
|
||||
prefix := func(annotateLine, num int) string {
|
||||
if annotateLine == num {
|
||||
return fmt.Sprintf("> %2d | ", num)
|
||||
}
|
||||
return fmt.Sprintf(" %2d | ", num)
|
||||
}
|
||||
p.LineNumber = true
|
||||
p.LineNumberFormat = func(num int) string {
|
||||
if isColored {
|
||||
fn := color.New(color.Bold, color.FgHiWhite).SprintFunc()
|
||||
return fn(prefix(annotateLine, num))
|
||||
}
|
||||
return prefix(annotateLine, num)
|
||||
}
|
||||
if isColored {
|
||||
p.setDefaultColorSet()
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Printer) PrintErrorToken(tk *token.Token, isColored bool) string {
|
||||
errToken := tk
|
||||
curLine := tk.Position.Line
|
||||
curExtLine := curLine + p.newLineCount(p.removeLeftSideNewLineChar(tk.Origin))
|
||||
if p.isNewLineLastChar(tk.Origin) {
|
||||
// if last character ( exclude white space ) is new line character, ignore it.
|
||||
curExtLine--
|
||||
}
|
||||
|
||||
minLine := int(math.Max(float64(curLine-3), 1))
|
||||
maxLine := curExtLine + 3
|
||||
p.setupErrorTokenFormat(curLine, isColored)
|
||||
|
||||
beforeTokens := p.printBeforeTokens(tk, minLine, curExtLine)
|
||||
lastTk := beforeTokens[len(beforeTokens)-1]
|
||||
afterTokens := p.printAfterTokens(lastTk.Next, maxLine)
|
||||
|
||||
beforeSource := p.PrintTokens(beforeTokens)
|
||||
prefixSpaceNum := len(fmt.Sprintf(" %2d | ", curLine))
|
||||
annotateLine := strings.Repeat(" ", prefixSpaceNum+errToken.Position.Column-1) + "^"
|
||||
afterSource := p.PrintTokens(afterTokens)
|
||||
return fmt.Sprintf("%s\n%s\n%s", beforeSource, annotateLine, afterSource)
|
||||
}
|
@ -0,0 +1,229 @@
|
||||
package scanner
|
||||
|
||||
import (
|
||||
"sync"
|
||||
|
||||
"github.com/goccy/go-yaml/token"
|
||||
)
|
||||
|
||||
const whitespace = ' '
|
||||
|
||||
// Context context at scanning
|
||||
type Context struct {
|
||||
idx int
|
||||
size int
|
||||
notSpaceCharPos int
|
||||
notSpaceOrgCharPos int
|
||||
src []rune
|
||||
buf []rune
|
||||
obuf []rune
|
||||
tokens token.Tokens
|
||||
isRawFolded bool
|
||||
isLiteral bool
|
||||
isFolded bool
|
||||
isSingleLine bool
|
||||
literalOpt string
|
||||
}
|
||||
|
||||
var (
|
||||
ctxPool = sync.Pool{
|
||||
New: func() interface{} {
|
||||
return createContext()
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
func createContext() *Context {
|
||||
return &Context{
|
||||
idx: 0,
|
||||
tokens: token.Tokens{},
|
||||
isSingleLine: true,
|
||||
}
|
||||
}
|
||||
|
||||
func newContext(src []rune) *Context {
|
||||
ctx := ctxPool.Get().(*Context)
|
||||
ctx.reset(src)
|
||||
return ctx
|
||||
}
|
||||
|
||||
func (c *Context) release() {
|
||||
ctxPool.Put(c)
|
||||
}
|
||||
|
||||
func (c *Context) reset(src []rune) {
|
||||
c.idx = 0
|
||||
c.size = len(src)
|
||||
c.src = src
|
||||
c.tokens = c.tokens[:0]
|
||||
c.resetBuffer()
|
||||
c.isRawFolded = false
|
||||
c.isSingleLine = true
|
||||
c.isLiteral = false
|
||||
c.isFolded = false
|
||||
c.literalOpt = ""
|
||||
}
|
||||
|
||||
func (c *Context) resetBuffer() {
|
||||
c.buf = c.buf[:0]
|
||||
c.obuf = c.obuf[:0]
|
||||
c.notSpaceCharPos = 0
|
||||
c.notSpaceOrgCharPos = 0
|
||||
}
|
||||
|
||||
func (c *Context) isSaveIndentMode() bool {
|
||||
return c.isLiteral || c.isFolded || c.isRawFolded
|
||||
}
|
||||
|
||||
func (c *Context) breakLiteral() {
|
||||
c.isLiteral = false
|
||||
c.isRawFolded = false
|
||||
c.isFolded = false
|
||||
c.literalOpt = ""
|
||||
}
|
||||
|
||||
func (c *Context) addToken(tk *token.Token) {
|
||||
if tk == nil {
|
||||
return
|
||||
}
|
||||
c.tokens = append(c.tokens, tk)
|
||||
}
|
||||
|
||||
func (c *Context) addBuf(r rune) {
|
||||
if len(c.buf) == 0 && r == ' ' {
|
||||
return
|
||||
}
|
||||
c.buf = append(c.buf, r)
|
||||
if r != ' ' && r != '\t' {
|
||||
c.notSpaceCharPos = len(c.buf)
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Context) addOriginBuf(r rune) {
|
||||
c.obuf = append(c.obuf, r)
|
||||
if r != ' ' && r != '\t' {
|
||||
c.notSpaceOrgCharPos = len(c.obuf)
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Context) removeRightSpaceFromBuf() int {
|
||||
trimmedBuf := c.obuf[:c.notSpaceOrgCharPos]
|
||||
buflen := len(trimmedBuf)
|
||||
diff := len(c.obuf) - buflen
|
||||
if diff > 0 {
|
||||
c.obuf = c.obuf[:buflen]
|
||||
c.buf = c.bufferedSrc()
|
||||
}
|
||||
return diff
|
||||
}
|
||||
|
||||
func (c *Context) isDocument() bool {
|
||||
return c.isLiteral || c.isFolded || c.isRawFolded
|
||||
}
|
||||
|
||||
func (c *Context) isEOS() bool {
|
||||
return len(c.src)-1 <= c.idx
|
||||
}
|
||||
|
||||
func (c *Context) isNextEOS() bool {
|
||||
return len(c.src)-1 <= c.idx+1
|
||||
}
|
||||
|
||||
func (c *Context) next() bool {
|
||||
return c.idx < c.size
|
||||
}
|
||||
|
||||
func (c *Context) source(s, e int) string {
|
||||
return string(c.src[s:e])
|
||||
}
|
||||
|
||||
func (c *Context) previousChar() rune {
|
||||
if c.idx > 0 {
|
||||
return c.src[c.idx-1]
|
||||
}
|
||||
return rune(0)
|
||||
}
|
||||
|
||||
func (c *Context) currentChar() rune {
|
||||
if c.size > c.idx {
|
||||
return c.src[c.idx]
|
||||
}
|
||||
return rune(0)
|
||||
}
|
||||
|
||||
func (c *Context) currentCharWithSkipWhitespace() rune {
|
||||
idx := c.idx
|
||||
for c.size > idx {
|
||||
ch := c.src[idx]
|
||||
if ch != whitespace {
|
||||
return ch
|
||||
}
|
||||
idx++
|
||||
}
|
||||
return rune(0)
|
||||
}
|
||||
|
||||
func (c *Context) nextChar() rune {
|
||||
if c.size > c.idx+1 {
|
||||
return c.src[c.idx+1]
|
||||
}
|
||||
return rune(0)
|
||||
}
|
||||
|
||||
func (c *Context) repeatNum(r rune) int {
|
||||
cnt := 0
|
||||
for i := c.idx; i < c.size; i++ {
|
||||
if c.src[i] == r {
|
||||
cnt++
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
return cnt
|
||||
}
|
||||
|
||||
// progress advances the current read position by num runes.
func (c *Context) progress(num int) {
	c.idx += num
}
|
||||
|
||||
// nextPos returns the index of the rune after the current one.
func (c *Context) nextPos() int {
	return c.idx + 1
}
|
||||
|
||||
// existsBuffer reports whether the value buffer holds any meaningful
// (non-trailing-blank) content.
func (c *Context) existsBuffer() bool {
	return len(c.bufferedSrc()) != 0
}
|
||||
|
||||
func (c *Context) bufferedSrc() []rune {
|
||||
src := c.buf[:c.notSpaceCharPos]
|
||||
if len(src) > 0 && src[len(src)-1] == '\n' && c.isDocument() && c.literalOpt == "-" {
|
||||
// remove end '\n' character
|
||||
src = src[:len(src)-1]
|
||||
}
|
||||
return src
|
||||
}
|
||||
|
||||
func (c *Context) bufferedToken(pos *token.Position) *token.Token {
|
||||
if c.idx == 0 {
|
||||
return nil
|
||||
}
|
||||
source := c.bufferedSrc()
|
||||
if len(source) == 0 {
|
||||
return nil
|
||||
}
|
||||
var tk *token.Token
|
||||
if c.isDocument() {
|
||||
tk = token.String(string(source), string(c.obuf), pos)
|
||||
} else {
|
||||
tk = token.New(string(source), string(c.obuf), pos)
|
||||
}
|
||||
c.resetBuffer()
|
||||
return tk
|
||||
}
|
||||
|
||||
func (c *Context) lastToken() *token.Token {
|
||||
if len(c.tokens) != 0 {
|
||||
return c.tokens[len(c.tokens)-1]
|
||||
}
|
||||
return nil
|
||||
}
|
@ -0,0 +1,903 @@
|
||||
package scanner
|
||||
|
||||
import (
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
"github.com/goccy/go-yaml/token"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// IndentState represents how the current line's indentation compares to the
// previous line's indentation.
type IndentState int

const (
	// IndentStateEqual means the indent equals the previous line's indent.
	IndentStateEqual IndentState = iota
	// IndentStateUp means the indent is deeper than the previous line's.
	IndentStateUp
	// IndentStateDown means the indent is shallower than the previous line's.
	IndentStateDown
	// IndentStateKeep means the previous state is carried over (the current
	// token is not the first one on its line, so it carries no indent info).
	IndentStateKeep
)
|
||||
|
||||
// Scanner holds the scanner's internal state while processing a given text.
// It can be allocated as part of another data structure but must be initialized via Init before use.
type Scanner struct {
	source     []rune // full source text as runes
	sourcePos  int    // number of runes consumed by previous Scan calls
	sourceSize int    // len(source)
	line       int    // current line (1-based)
	column     int    // current column (1-based)
	offset     int    // rune offset from the start of the source (1-based)
	// Indent bookkeeping for the previously scanned line.
	prevIndentLevel  int
	prevIndentNum    int
	prevIndentColumn int
	// docStartColumn is the base column of the current block scalar's content
	// (0 while not inside a block scalar).
	docStartColumn int
	indentLevel    int
	indentNum      int
	// isFirstCharAtLine is true until the first non-space rune of a line is seen.
	isFirstCharAtLine bool
	// isAnchor is true while scanning the name following an '&' anchor.
	isAnchor bool
	// Nesting depth of flow sequences '[' and flow mappings '{'.
	startedFlowSequenceNum int
	startedFlowMapNum      int
	indentState            IndentState
	// savedPos, when non-nil, is the start position of a multi-line plain
	// scalar, to be used for the next buffered token.
	savedPos *token.Position
}
|
||||
|
||||
// pos snapshots the scanner's current location as a new token.Position.
func (s *Scanner) pos() *token.Position {
	return &token.Position{
		Line:        s.line,
		Column:      s.column,
		Offset:      s.offset,
		IndentNum:   s.indentNum,
		IndentLevel: s.indentLevel,
	}
}
|
||||
|
||||
// bufferedToken flushes the context's buffered value into a token. If a
// position was saved earlier (start of a multi-line plain scalar), that
// position is used and cleared; otherwise the position is derived from the
// current cursor, walking back over the buffered content.
func (s *Scanner) bufferedToken(ctx *Context) *token.Token {
	if s.savedPos != nil {
		tk := ctx.bufferedToken(s.savedPos)
		s.savedPos = nil
		return tk
	}
	// The token starts len(ctx.buf) runes before the current cursor.
	line := s.line
	column := s.column - len(ctx.buf)
	level := s.indentLevel
	if ctx.isSaveIndentMode() {
		// Multi-line block scalar: rewind the line count and locate the
		// value's column inside the origin buffer.
		line -= s.newLineCount(ctx.buf)
		column = strings.Index(string(ctx.obuf), string(ctx.buf)) + 1
		// Since we are in a literal, folded or raw folded
		// we can use the indent level from the last token.
		last := ctx.lastToken()
		if last != nil { // The last token should never be nil here.
			level = last.Position.IndentLevel + 1
		}
	}
	return ctx.bufferedToken(&token.Position{
		Line:        line,
		Column:      column,
		Offset:      s.offset - len(ctx.buf),
		IndentNum:   s.indentNum,
		IndentLevel: level,
	})
}
|
||||
|
||||
// progressColumn advances the column and rune offset by num and consumes the
// same number of runes from the context.
func (s *Scanner) progressColumn(ctx *Context, num int) {
	s.column += num
	s.offset += num
	ctx.progress(num)
}
|
||||
|
||||
// progressLine advances the scanner to the start of the next line,
// consuming one rune (the line break) and resetting all per-line state.
func (s *Scanner) progressLine(ctx *Context) {
	s.column = 1
	s.line++
	s.offset++
	s.indentNum = 0
	s.isFirstCharAtLine = true
	s.isAnchor = false
	ctx.progress(1)
}
|
||||
|
||||
func (s *Scanner) isNeededKeepPreviousIndentNum(ctx *Context, c rune) bool {
|
||||
if !s.isChangedToIndentStateUp() {
|
||||
return false
|
||||
}
|
||||
if ctx.isDocument() {
|
||||
return true
|
||||
}
|
||||
if c == '-' && ctx.existsBuffer() {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (s *Scanner) isNewLineChar(c rune) bool {
|
||||
if c == '\n' {
|
||||
return true
|
||||
}
|
||||
if c == '\r' {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (s *Scanner) newLineCount(src []rune) int {
|
||||
size := len(src)
|
||||
cnt := 0
|
||||
for i := 0; i < size; i++ {
|
||||
c := src[i]
|
||||
switch c {
|
||||
case '\r':
|
||||
if i+1 < size && src[i+1] == '\n' {
|
||||
i++
|
||||
}
|
||||
cnt++
|
||||
case '\n':
|
||||
cnt++
|
||||
}
|
||||
}
|
||||
return cnt
|
||||
}
|
||||
|
||||
// updateIndentState determines the indent state of the current line. It
// first derives a candidate state from the raw indent count (which also
// updates indentLevel), then lets the previous mapping key's column —
// when one is recorded — take precedence, since block content may have raw
// indents unrelated to the document structure.
func (s *Scanner) updateIndentState(ctx *Context) {
	indentNumBasedIndentState := s.indentState
	if s.prevIndentNum < s.indentNum {
		s.indentLevel = s.prevIndentLevel + 1
		indentNumBasedIndentState = IndentStateUp
	} else if s.prevIndentNum == s.indentNum {
		s.indentLevel = s.prevIndentLevel
		indentNumBasedIndentState = IndentStateEqual
	} else {
		indentNumBasedIndentState = IndentStateDown
		if s.prevIndentLevel > 0 {
			s.indentLevel = s.prevIndentLevel - 1
		}
	}

	if s.prevIndentColumn > 0 {
		if s.prevIndentColumn < s.column {
			s.indentState = IndentStateUp
		} else if s.prevIndentColumn != s.column || indentNumBasedIndentState != IndentStateEqual {
			// In the following case ( current position is 'd' ), the variables become:
			// - prevIndentColumn: 1 (column of 'a')
			// - indentNumBasedIndentState: IndentStateDown because d's indentNum(1) is less than c's indentNum(3).
			// Here s.prevIndentColumn(1) == s.column(1) is true, but we want to treat this as IndentStateDown.
			// So we also consult the prevIndentNum-based state computed above to determine the final indentState.
			// ---
			// a:
			//   b
			//     c
			// d: e
			// ^
			s.indentState = IndentStateDown
		} else {
			s.indentState = IndentStateEqual
		}
	} else {
		s.indentState = indentNumBasedIndentState
	}
}
|
||||
|
||||
// updateIndent consumes c for indentation bookkeeping: it counts leading
// spaces, computes the line's indent state on the first non-space rune, and
// snapshots the previous-line indent values for the next comparison.
func (s *Scanner) updateIndent(ctx *Context, c rune) {
	// A blank line inside a block scalar carries no indent information.
	if s.isFirstCharAtLine && s.isNewLineChar(c) && ctx.isDocument() {
		return
	}
	// Still counting the line's leading spaces.
	if s.isFirstCharAtLine && c == ' ' {
		s.indentNum++
		return
	}
	// Mid-line characters never change the indent comparison.
	if !s.isFirstCharAtLine {
		s.indentState = IndentStateKeep
		return
	}
	s.updateIndentState(ctx)
	s.isFirstCharAtLine = false
	if s.isNeededKeepPreviousIndentNum(ctx, c) {
		return
	}
	if s.indentState != IndentStateUp {
		// Forget the recorded key column once the indent stops rising.
		s.prevIndentColumn = 0
	}
	s.prevIndentNum = s.indentNum
	s.prevIndentLevel = s.indentLevel
}
|
||||
|
||||
// isChangedToIndentStateDown reports whether the current line dedented.
func (s *Scanner) isChangedToIndentStateDown() bool {
	return s.indentState == IndentStateDown
}
|
||||
|
||||
// isChangedToIndentStateUp reports whether the current line indented deeper.
func (s *Scanner) isChangedToIndentStateUp() bool {
	return s.indentState == IndentStateUp
}
|
||||
|
||||
// isChangedToIndentStateEqual reports whether the current line keeps the
// previous line's indent.
func (s *Scanner) isChangedToIndentStateEqual() bool {
	return s.indentState == IndentStateEqual
}
|
||||
|
||||
// addBufferedTokenIfExists flushes any buffered value into the token list;
// an empty buffer yields nil, which addToken ignores.
func (s *Scanner) addBufferedTokenIfExists(ctx *Context) {
	ctx.addToken(s.bufferedToken(ctx))
}
|
||||
|
||||
// breakLiteral leaves block-scalar mode and forgets the scalar's base column.
func (s *Scanner) breakLiteral(ctx *Context) {
	s.docStartColumn = 0
	ctx.breakLiteral()
}
|
||||
|
||||
// scanSingleQuote scans a single-quoted flow scalar starting at the opening
// quote. Line breaks inside the scalar fold into a single space, leading
// indentation of continuation lines is dropped, and '' decodes to a literal
// quote. It returns the token and the number of runes consumed after the
// opening quote; tk is nil when no closing quote is found.
func (s *Scanner) scanSingleQuote(ctx *Context) (tk *token.Token, pos int) {
	ctx.addOriginBuf('\'')
	srcpos := s.pos()
	startIndex := ctx.idx + 1
	src := ctx.src
	size := len(src)
	value := []rune{}
	isFirstLineChar := false
	isNewLine := false
	for idx := startIndex; idx < size; idx++ {
		// progressLine already consumed the break, so skip one advance
		// after a newline.
		if !isNewLine {
			s.progressColumn(ctx, 1)
		} else {
			isNewLine = false
		}
		c := src[idx]
		pos = idx + 1
		ctx.addOriginBuf(c)
		if s.isNewLineChar(c) {
			// A break inside the scalar folds into a space.
			value = append(value, ' ')
			isFirstLineChar = true
			isNewLine = true
			s.progressLine(ctx)
			continue
		} else if c == ' ' && isFirstLineChar {
			// Continuation-line indentation is not content.
			continue
		} else if c != '\'' {
			value = append(value, c)
			isFirstLineChar = false
			continue
		}
		if idx+1 < len(ctx.src) && ctx.src[idx+1] == '\'' {
			// '' handle as ' character
			value = append(value, c)
			ctx.addOriginBuf(c)
			idx++
			continue
		}
		// Closing quote found.
		s.progressColumn(ctx, 1)
		tk = token.SingleQuote(string(value), string(ctx.obuf), srcpos)
		pos = idx - startIndex + 1
		return
	}
	return
}
|
||||
|
||||
// hexToInt converts a single hexadecimal digit rune to its integer value.
// Behavior is undefined for runes outside [0-9a-fA-F].
func hexToInt(b rune) int {
	switch {
	case 'A' <= b && b <= 'F':
		return int(b-'A') + 10
	case 'a' <= b && b <= 'f':
		return int(b-'a') + 10
	default:
		return int(b - '0')
	}
}
|
||||
|
||||
func hexRunesToInt(b []rune) int {
|
||||
sum := 0
|
||||
for i := 0; i < len(b); i++ {
|
||||
sum += hexToInt(b[i]) << (uint(len(b)-i-1) * 4)
|
||||
}
|
||||
return sum
|
||||
}
|
||||
|
||||
func (s *Scanner) scanDoubleQuote(ctx *Context) (tk *token.Token, pos int) {
|
||||
ctx.addOriginBuf('"')
|
||||
srcpos := s.pos()
|
||||
startIndex := ctx.idx + 1
|
||||
src := ctx.src
|
||||
size := len(src)
|
||||
value := []rune{}
|
||||
isFirstLineChar := false
|
||||
isNewLine := false
|
||||
for idx := startIndex; idx < size; idx++ {
|
||||
if !isNewLine {
|
||||
s.progressColumn(ctx, 1)
|
||||
} else {
|
||||
isNewLine = false
|
||||
}
|
||||
c := src[idx]
|
||||
pos = idx + 1
|
||||
ctx.addOriginBuf(c)
|
||||
if s.isNewLineChar(c) {
|
||||
value = append(value, ' ')
|
||||
isFirstLineChar = true
|
||||
isNewLine = true
|
||||
s.progressLine(ctx)
|
||||
continue
|
||||
} else if c == ' ' && isFirstLineChar {
|
||||
continue
|
||||
} else if c == '\\' {
|
||||
isFirstLineChar = false
|
||||
if idx+1 < size {
|
||||
nextChar := src[idx+1]
|
||||
switch nextChar {
|
||||
case 'b':
|
||||
ctx.addOriginBuf(nextChar)
|
||||
value = append(value, '\b')
|
||||
idx++
|
||||
continue
|
||||
case 'e':
|
||||
ctx.addOriginBuf(nextChar)
|
||||
value = append(value, '\x1B')
|
||||
idx++
|
||||
continue
|
||||
case 'f':
|
||||
ctx.addOriginBuf(nextChar)
|
||||
value = append(value, '\f')
|
||||
idx++
|
||||
continue
|
||||
case 'n':
|
||||
ctx.addOriginBuf(nextChar)
|
||||
value = append(value, '\n')
|
||||
idx++
|
||||
continue
|
||||
case 'v':
|
||||
ctx.addOriginBuf(nextChar)
|
||||
value = append(value, '\v')
|
||||
idx++
|
||||
continue
|
||||
case 'L': // LS (#x2028)
|
||||
ctx.addOriginBuf(nextChar)
|
||||
value = append(value, []rune{'\xE2', '\x80', '\xA8'}...)
|
||||
idx++
|
||||
continue
|
||||
case 'N': // NEL (#x85)
|
||||
ctx.addOriginBuf(nextChar)
|
||||
value = append(value, []rune{'\xC2', '\x85'}...)
|
||||
idx++
|
||||
continue
|
||||
case 'P': // PS (#x2029)
|
||||
ctx.addOriginBuf(nextChar)
|
||||
value = append(value, []rune{'\xE2', '\x80', '\xA9'}...)
|
||||
idx++
|
||||
continue
|
||||
case '_': // #xA0
|
||||
ctx.addOriginBuf(nextChar)
|
||||
value = append(value, []rune{'\xC2', '\xA0'}...)
|
||||
idx++
|
||||
continue
|
||||
case '"':
|
||||
ctx.addOriginBuf(nextChar)
|
||||
value = append(value, nextChar)
|
||||
idx++
|
||||
continue
|
||||
case 'x':
|
||||
if idx+3 >= size {
|
||||
// TODO: need to return error
|
||||
//err = xerrors.New("invalid escape character \\x")
|
||||
return
|
||||
}
|
||||
codeNum := hexRunesToInt(src[idx+2 : idx+4])
|
||||
value = append(value, rune(codeNum))
|
||||
idx += 3
|
||||
continue
|
||||
case 'u':
|
||||
if idx+5 >= size {
|
||||
// TODO: need to return error
|
||||
//err = xerrors.New("invalid escape character \\u")
|
||||
return
|
||||
}
|
||||
codeNum := hexRunesToInt(src[idx+2 : idx+6])
|
||||
value = append(value, rune(codeNum))
|
||||
idx += 5
|
||||
continue
|
||||
case 'U':
|
||||
if idx+9 >= size {
|
||||
// TODO: need to return error
|
||||
//err = xerrors.New("invalid escape character \\U")
|
||||
return
|
||||
}
|
||||
codeNum := hexRunesToInt(src[idx+2 : idx+10])
|
||||
value = append(value, rune(codeNum))
|
||||
idx += 9
|
||||
continue
|
||||
case '\\':
|
||||
ctx.addOriginBuf(nextChar)
|
||||
idx++
|
||||
}
|
||||
}
|
||||
value = append(value, c)
|
||||
continue
|
||||
} else if c != '"' {
|
||||
value = append(value, c)
|
||||
isFirstLineChar = false
|
||||
continue
|
||||
}
|
||||
s.progressColumn(ctx, 1)
|
||||
tk = token.DoubleQuote(string(value), string(ctx.obuf), srcpos)
|
||||
pos = idx - startIndex + 1
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (s *Scanner) scanQuote(ctx *Context, ch rune) (tk *token.Token, pos int) {
|
||||
if ch == '\'' {
|
||||
return s.scanSingleQuote(ctx)
|
||||
}
|
||||
return s.scanDoubleQuote(ctx)
|
||||
}
|
||||
|
||||
func (s *Scanner) isMergeKey(ctx *Context) bool {
|
||||
if ctx.repeatNum('<') != 2 {
|
||||
return false
|
||||
}
|
||||
src := ctx.src
|
||||
size := len(src)
|
||||
for idx := ctx.idx + 2; idx < size; idx++ {
|
||||
c := src[idx]
|
||||
if c == ' ' {
|
||||
continue
|
||||
}
|
||||
if c != ':' {
|
||||
return false
|
||||
}
|
||||
if idx+1 < size {
|
||||
nc := src[idx+1]
|
||||
if nc == ' ' || s.isNewLineChar(nc) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// scanTag scans a '!' tag token up to the next space or line break and
// returns it together with the number of runes to advance. tk stays nil
// when the tag runs to the end of the input.
func (s *Scanner) scanTag(ctx *Context) (tk *token.Token, pos int) {
	ctx.addOriginBuf('!')
	ctx.progress(1) // skip '!' character
	for idx, c := range ctx.src[ctx.idx:] {
		pos = idx + 1
		ctx.addOriginBuf(c)
		switch c {
		case ' ', '\n', '\r':
			// The tag value includes the leading '!' (hence ctx.idx-1).
			value := ctx.source(ctx.idx-1, ctx.idx+idx)
			tk = token.Tag(value, string(ctx.obuf), s.pos())
			pos = len([]rune(value))
			return
		}
	}
	return
}
|
||||
|
||||
// scanComment scans a '#' comment up to the end of the line or the end of
// the input, returning the comment token and the number of runes to advance.
func (s *Scanner) scanComment(ctx *Context) (tk *token.Token, pos int) {
	ctx.addOriginBuf('#')
	ctx.progress(1) // skip '#' character
	for idx, c := range ctx.src[ctx.idx:] {
		pos = idx + 1
		ctx.addOriginBuf(c)
		switch c {
		case '\n', '\r':
			// NOTE(review): previousChar() inspects ctx.idx-1 — the rune
			// before the comment body, which is constant across this loop —
			// not the rune immediately preceding this line break. Confirm
			// whether a backslash-continued comment was really intended.
			if ctx.previousChar() == '\\' {
				continue
			}
			value := ctx.source(ctx.idx, ctx.idx+idx)
			tk = token.Comment(value, string(ctx.obuf), s.pos())
			pos = len([]rune(value)) + 1
			return
		}
	}
	// document ends with comment.
	value := string(ctx.src[ctx.idx:])
	tk = token.Comment(value, string(ctx.obuf), s.pos())
	pos = len([]rune(value)) + 1
	return
}
|
||||
|
||||
func trimCommentFromLiteralOpt(text string) (string, error) {
|
||||
idx := strings.Index(text, "#")
|
||||
if idx < 0 {
|
||||
return text, nil
|
||||
}
|
||||
if idx == 0 {
|
||||
return "", xerrors.New("invalid literal header")
|
||||
}
|
||||
return text[:idx-1], nil
|
||||
}
|
||||
|
||||
// scanLiteral consumes one rune of block-scalar (literal/folded/raw-folded)
// content. It fixes the scalar's base column on the first content rune,
// folds line breaks into spaces for folded scalars, and emits the final
// string token when the end of the input is reached.
func (s *Scanner) scanLiteral(ctx *Context, c rune) {
	ctx.addOriginBuf(c)
	if ctx.isEOS() {
		if ctx.isLiteral {
			ctx.addBuf(c)
		}
		// End of input: flush the scalar as a string token.
		value := ctx.bufferedSrc()
		ctx.addToken(token.String(string(value), string(ctx.obuf), s.pos()))
		ctx.resetBuffer()
		s.progressColumn(ctx, 1)
	} else if s.isNewLineChar(c) {
		if ctx.isLiteral {
			// Literal scalars keep their line breaks verbatim.
			ctx.addBuf(c)
		} else {
			// Folded scalars replace a break with a space.
			ctx.addBuf(' ')
		}
		s.progressLine(ctx)
	} else if s.isFirstCharAtLine && c == ' ' {
		// Leading spaces at or beyond the base column are content;
		// shallower ones are block indentation.
		if 0 < s.docStartColumn && s.docStartColumn <= s.column {
			ctx.addBuf(c)
		}
		s.progressColumn(ctx, 1)
	} else {
		if s.docStartColumn == 0 {
			// First content rune fixes the scalar's base column.
			s.docStartColumn = s.column
		}
		ctx.addBuf(c)
		s.progressColumn(ctx, 1)
	}
}
|
||||
|
||||
// scanLiteralHeader scans a block scalar header ('|' or '>' plus an optional
// chomping/indent indicator and optional trailing comment) up to the line
// break. On success it emits the Literal/Folded token (and a Comment token
// when present), switches the context into block-scalar mode, and returns
// the number of runes to advance. An unrecognized header yields an error.
func (s *Scanner) scanLiteralHeader(ctx *Context) (pos int, err error) {
	header := ctx.currentChar()
	ctx.addOriginBuf(header)
	ctx.progress(1) // skip '|' or '>' character
	for idx, c := range ctx.src[ctx.idx:] {
		pos = idx
		ctx.addOriginBuf(c)
		switch c {
		case '\n', '\r':
			value := ctx.source(ctx.idx, ctx.idx+idx)
			opt := strings.TrimRight(value, " ")
			orgOptLen := len(opt)
			opt, err = trimCommentFromLiteralOpt(opt)
			if err != nil {
				return
			}
			switch opt {
			case "", "+", "-",
				"0", "1", "2", "3", "4", "5", "6", "7", "8", "9":
				// A shrunken opt means a comment was trimmed off.
				hasComment := len(opt) < orgOptLen
				if header == '|' {
					if hasComment {
						// Split the origin buffer into the header part and
						// the comment part so each token gets its own origin.
						commentLen := orgOptLen - len(opt)
						headerPos := strings.Index(string(ctx.obuf), "|")
						litBuf := ctx.obuf[:len(ctx.obuf)-commentLen-headerPos]
						commentBuf := ctx.obuf[len(litBuf):]
						ctx.addToken(token.Literal("|"+opt, string(litBuf), s.pos()))
						s.column += len(litBuf)
						s.offset += len(litBuf)
						commentHeader := strings.Index(value, "#")
						ctx.addToken(token.Comment(string(value[commentHeader+1:]), string(commentBuf), s.pos()))
					} else {
						ctx.addToken(token.Literal("|"+opt, string(ctx.obuf), s.pos()))
					}
					ctx.isLiteral = true
				} else if header == '>' {
					if hasComment {
						// Same splitting as the '|' branch, for '>'.
						commentLen := orgOptLen - len(opt)
						headerPos := strings.Index(string(ctx.obuf), ">")
						foldedBuf := ctx.obuf[:len(ctx.obuf)-commentLen-headerPos]
						commentBuf := ctx.obuf[len(foldedBuf):]
						ctx.addToken(token.Folded(">"+opt, string(foldedBuf), s.pos()))
						s.column += len(foldedBuf)
						s.offset += len(foldedBuf)
						commentHeader := strings.Index(value, "#")
						ctx.addToken(token.Comment(string(value[commentHeader+1:]), string(commentBuf), s.pos()))
					} else {
						ctx.addToken(token.Folded(">"+opt, string(ctx.obuf), s.pos()))
					}
					ctx.isFolded = true
				}
				s.indentState = IndentStateKeep
				ctx.resetBuffer()
				ctx.literalOpt = opt
				return
			}
			// NOTE(review): this break only exits the enclosing switch case,
			// so an unrecognized option keeps scanning the rest of the input
			// before falling through to the error below — confirm intent.
			break
		}
	}
	err = xerrors.New("invalid literal header")
	return
}
|
||||
|
||||
// scanNewLine handles a line break outside quoted/block scalars. It records
// the start position of a multi-line plain scalar the first time a break is
// seen with buffered content, trims trailing spaces from the buffers,
// flushes the buffer at EOS or after an anchor, and folds the break into a
// space before advancing to the next line.
func (s *Scanner) scanNewLine(ctx *Context, c rune) {
	// Remember where the buffered (plain multi-line) value started so the
	// eventual token is positioned at its first line.
	if len(ctx.buf) > 0 && s.savedPos == nil {
		s.savedPos = s.pos()
		s.savedPos.Column -= len(ctx.bufferedSrc())
	}

	// if the following case, origin buffer has unnecessary two spaces.
	// So, `removeRightSpaceFromOriginBuf` remove them, also fix column number too.
	// ---
	// a:[space][space]
	//   b: c
	removedNum := ctx.removeRightSpaceFromBuf()
	if removedNum > 0 {
		s.column -= removedNum
		s.offset -= removedNum
		if s.savedPos != nil {
			s.savedPos.Column -= removedNum
		}
	}

	// At end of source or right after an anchor name, the buffered value is
	// complete and must be flushed now.
	if ctx.isEOS() {
		s.addBufferedTokenIfExists(ctx)
	} else if s.isAnchor {
		s.addBufferedTokenIfExists(ctx)
	}
	ctx.addBuf(' ') // a folded break contributes a single space
	ctx.addOriginBuf(c)
	ctx.isSingleLine = false
	s.progressLine(ctx)
}
|
||||
|
||||
// scan is the scanner's main loop. It consumes runes from ctx until it has
// produced at least one token (most indicator cases return immediately) or
// the input is exhausted, and returns the number of runes consumed so the
// caller can resume from there on the next call. Plain-scalar content is
// accumulated in the context's buffers and flushed when an indicator or
// indent change ends it.
func (s *Scanner) scan(ctx *Context) (pos int) {
	for ctx.next() {
		pos = ctx.nextPos()
		c := ctx.currentChar()
		s.updateIndent(ctx, c)
		if ctx.isDocument() {
			// Inside a block scalar: a same-or-shallower indent ends it;
			// otherwise the rune is scalar content.
			if s.isChangedToIndentStateEqual() ||
				s.isChangedToIndentStateDown() {
				s.addBufferedTokenIfExists(ctx)
				s.breakLiteral(ctx)
			} else {
				s.scanLiteral(ctx, c)
				continue
			}
		} else if s.isChangedToIndentStateDown() {
			// A dedent always terminates the buffered value.
			s.addBufferedTokenIfExists(ctx)
		} else if s.isChangedToIndentStateEqual() {
			// if first character is new line character, buffer expect to raw folded literal
			if len(ctx.obuf) > 0 && s.newLineCount(ctx.obuf) <= 1 {
				// doesn't raw folded literal
				s.addBufferedTokenIfExists(ctx)
			}
		}
		switch c {
		case '{':
			// Flow mapping start (only when not inside a plain scalar).
			if !ctx.existsBuffer() {
				ctx.addOriginBuf(c)
				ctx.addToken(token.MappingStart(string(ctx.obuf), s.pos()))
				s.startedFlowMapNum++
				s.progressColumn(ctx, 1)
				return
			}
		case '}':
			if !ctx.existsBuffer() || s.startedFlowMapNum > 0 {
				ctx.addToken(s.bufferedToken(ctx))
				ctx.addOriginBuf(c)
				ctx.addToken(token.MappingEnd(string(ctx.obuf), s.pos()))
				s.startedFlowMapNum--
				s.progressColumn(ctx, 1)
				return
			}
		case '.':
			// "..." at column 1 is a document-end marker.
			if s.indentNum == 0 && s.column == 1 && ctx.repeatNum('.') == 3 {
				ctx.addToken(token.DocumentEnd(string(ctx.obuf)+"...", s.pos()))
				s.progressColumn(ctx, 3)
				pos += 2
				return
			}
		case '<':
			if s.isMergeKey(ctx) {
				s.prevIndentColumn = s.column
				ctx.addToken(token.MergeKey(string(ctx.obuf)+"<<", s.pos()))
				s.progressColumn(ctx, 1)
				pos++
				return
			}
		case '-':
			// "---" at column 1 is a document header.
			if s.indentNum == 0 && s.column == 1 && ctx.repeatNum('-') == 3 {
				s.addBufferedTokenIfExists(ctx)
				ctx.addToken(token.DocumentHeader(string(ctx.obuf)+"---", s.pos()))
				s.progressColumn(ctx, 3)
				pos += 2
				return
			}
			if ctx.existsBuffer() && s.isChangedToIndentStateUp() {
				// raw folded
				ctx.isRawFolded = true
				ctx.addBuf(c)
				ctx.addOriginBuf(c)
				s.progressColumn(ctx, 1)
				continue
			}
			if ctx.existsBuffer() {
				// '-' is literal
				ctx.addBuf(c)
				ctx.addOriginBuf(c)
				s.progressColumn(ctx, 1)
				continue
			}
			// "- " (or "-" before a break) starts a sequence entry.
			nc := ctx.nextChar()
			if nc == ' ' || s.isNewLineChar(nc) {
				s.addBufferedTokenIfExists(ctx)
				ctx.addOriginBuf(c)
				tk := token.SequenceEntry(string(ctx.obuf), s.pos())
				s.prevIndentColumn = tk.Position.Column
				ctx.addToken(tk)
				s.progressColumn(ctx, 1)
				return
			}
		case '[':
			if !ctx.existsBuffer() {
				ctx.addOriginBuf(c)
				ctx.addToken(token.SequenceStart(string(ctx.obuf), s.pos()))
				s.startedFlowSequenceNum++
				s.progressColumn(ctx, 1)
				return
			}
		case ']':
			if !ctx.existsBuffer() || s.startedFlowSequenceNum > 0 {
				s.addBufferedTokenIfExists(ctx)
				ctx.addOriginBuf(c)
				ctx.addToken(token.SequenceEnd(string(ctx.obuf), s.pos()))
				s.startedFlowSequenceNum--
				s.progressColumn(ctx, 1)
				return
			}
		case ',':
			// ',' only separates entries inside a flow collection.
			if s.startedFlowSequenceNum > 0 || s.startedFlowMapNum > 0 {
				s.addBufferedTokenIfExists(ctx)
				ctx.addOriginBuf(c)
				ctx.addToken(token.CollectEntry(string(ctx.obuf), s.pos()))
				s.progressColumn(ctx, 1)
				return
			}
		case ':':
			nc := ctx.nextChar()
			if s.startedFlowMapNum > 0 || nc == ' ' || s.isNewLineChar(nc) || ctx.isNextEOS() {
				// mapping value
				tk := s.bufferedToken(ctx)
				if tk != nil {
					s.prevIndentColumn = tk.Position.Column
					ctx.addToken(tk)
				} else if tk := ctx.lastToken(); tk != nil {
					// If the map key is quote, the buffer does not exist because it has already been cut into tokens.
					// Therefore, we need to check the last token.
					if tk.Indicator == token.QuotedScalarIndicator {
						s.prevIndentColumn = tk.Position.Column
					}
				}
				ctx.addToken(token.MappingValue(s.pos()))
				s.progressColumn(ctx, 1)
				return
			}
		case '|', '>':
			// Block scalar header.
			if !ctx.existsBuffer() {
				progress, err := s.scanLiteralHeader(ctx)
				if err != nil {
					// TODO: returns syntax error object
					return
				}
				s.progressColumn(ctx, progress)
				s.progressLine(ctx)
				continue
			}
		case '!':
			if !ctx.existsBuffer() {
				token, progress := s.scanTag(ctx)
				ctx.addToken(token)
				s.progressColumn(ctx, progress)
				if c := ctx.previousChar(); s.isNewLineChar(c) {
					s.progressLine(ctx)
				}
				pos += progress
				return
			}
		case '%':
			// Directives ("%YAML", "%TAG") are only valid at column 1.
			if !ctx.existsBuffer() && s.indentNum == 0 {
				ctx.addToken(token.Directive(string(ctx.obuf)+"%", s.pos()))
				s.progressColumn(ctx, 1)
				return
			}
		case '?':
			nc := ctx.nextChar()
			if !ctx.existsBuffer() && nc == ' ' {
				ctx.addToken(token.MappingKey(s.pos()))
				s.progressColumn(ctx, 1)
				return
			}
		case '&':
			if !ctx.existsBuffer() {
				s.addBufferedTokenIfExists(ctx)
				ctx.addOriginBuf(c)
				ctx.addToken(token.Anchor(string(ctx.obuf), s.pos()))
				s.progressColumn(ctx, 1)
				s.isAnchor = true
				return
			}
		case '*':
			if !ctx.existsBuffer() {
				s.addBufferedTokenIfExists(ctx)
				ctx.addOriginBuf(c)
				ctx.addToken(token.Alias(string(ctx.obuf), s.pos()))
				s.progressColumn(ctx, 1)
				return
			}
		case '#':
			// A comment starts at the line start or after a space.
			if !ctx.existsBuffer() || ctx.previousChar() == ' ' {
				s.addBufferedTokenIfExists(ctx)
				token, progress := s.scanComment(ctx)
				ctx.addToken(token)
				s.progressColumn(ctx, progress)
				s.progressLine(ctx)
				pos += progress
				return
			}
		case '\'', '"':
			if !ctx.existsBuffer() {
				token, progress := s.scanQuote(ctx, c)
				ctx.addToken(token)
				pos += progress
				// If the non-whitespace character immediately following the quote is ':', the quote should be treated as a map key.
				// Therefore, do not return and continue processing as a normal map key.
				if ctx.currentCharWithSkipWhitespace() == ':' {
					continue
				}
				return
			}
		case '\r', '\n':
			// There is no problem that we ignore CR which followed by LF and normalize it to LF, because of following YAML1.2 spec.
			// > Line breaks inside scalar content must be normalized by the YAML processor. Each such line break must be parsed into a single line feed character.
			// > Outside scalar content, YAML allows any line break to be used to terminate lines.
			// > -- https://yaml.org/spec/1.2/spec.html
			if c == '\r' && ctx.nextChar() == '\n' {
				ctx.addOriginBuf('\r')
				ctx.progress(1)
				c = '\n'
			}
			s.scanNewLine(ctx, c)
			continue
		case ' ':
			if ctx.isSaveIndentMode() || (!s.isAnchor && !s.isFirstCharAtLine) {
				// Inside a value (or block scalar) a space is content.
				ctx.addBuf(c)
				ctx.addOriginBuf(c)
				s.progressColumn(ctx, 1)
				continue
			}
			if s.isFirstCharAtLine {
				// Leading indentation: consume without buffering as value.
				s.progressColumn(ctx, 1)
				ctx.addOriginBuf(c)
				continue
			}
			s.addBufferedTokenIfExists(ctx)
			pos-- // to rescan white space at next scanning for adding white space to next buffer.
			s.isAnchor = false
			return
		}
		// Default: plain-scalar content.
		ctx.addBuf(c)
		ctx.addOriginBuf(c)
		s.progressColumn(ctx, 1)
	}
	// Input exhausted: flush whatever is left in the buffer.
	s.addBufferedTokenIfExists(ctx)
	return
}
|
||||
|
||||
// Init prepares the scanner s to tokenize the text src by setting the scanner at the beginning of src.
|
||||
func (s *Scanner) Init(text string) {
|
||||
src := []rune(text)
|
||||
s.source = src
|
||||
s.sourcePos = 0
|
||||
s.sourceSize = len(src)
|
||||
s.line = 1
|
||||
s.column = 1
|
||||
s.offset = 1
|
||||
s.prevIndentLevel = 0
|
||||
s.prevIndentNum = 0
|
||||
s.prevIndentColumn = 0
|
||||
s.indentLevel = 0
|
||||
s.indentNum = 0
|
||||
s.isFirstCharAtLine = true
|
||||
}
|
||||
|
||||
// Scan scans the next token and returns the token collection. The source end is indicated by io.EOF.
func (s *Scanner) Scan() (token.Tokens, error) {
	if s.sourcePos >= s.sourceSize {
		return nil, io.EOF
	}
	// Each call scans the remaining source with a fresh (pooled) context.
	ctx := newContext(s.source[s.sourcePos:])
	defer ctx.release()
	progress := s.scan(ctx)
	s.sourcePos += progress
	// Copy the tokens out: ctx (and its token slice) is released above.
	var tokens token.Tokens
	tokens = append(tokens, ctx.tokens...)
	return tokens, nil
}
|
@ -0,0 +1,103 @@
|
||||
// Copied and trimmed down from https://github.com/golang/go/blob/e3769299cd3484e018e0e2a6e1b95c2b18ce4f41/src/strconv/quote.go
|
||||
// We want to use the standard library's private "quoteWith" function rather than write our own so that we get robust unicode support.
|
||||
// Every private function called by quoteWith was copied.
|
||||
// There are 2 modifications to simplify the code:
|
||||
// 1. The unicode.IsPrint function was substituted for the custom implementation of IsPrint
|
||||
// 2. All code paths reachable only when ASCIIonly or graphicOnly are set to true were removed.
|
||||
|
||||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
const (
	// lowerhex supplies the digits for \xNN, \uNNNN and \UNNNNNNNN escapes.
	lowerhex = "0123456789abcdef"
)
|
||||
|
||||
// quoteWith returns s wrapped in the given quote byte, with non-printable
// runes and the quote character itself escaped.
func quoteWith(s string, quote byte) string {
	return string(appendQuotedWith(make([]byte, 0, 3*len(s)/2), s, quote))
}
|
||||
|
||||
// appendQuotedWith appends the quoted form of s (delimited by quote) to buf,
// escaping invalid UTF-8 bytes as \xNN and delegating rune escaping to
// appendEscapedRune.
func appendQuotedWith(buf []byte, s string, quote byte) []byte {
	// Often called with big strings, so preallocate. If there's quoting,
	// this is conservative but still helps a lot.
	if cap(buf)-len(buf) < len(s) {
		nBuf := make([]byte, len(buf), len(buf)+1+len(s)+1)
		copy(nBuf, buf)
		buf = nBuf
	}
	buf = append(buf, quote)
	for width := 0; len(s) > 0; s = s[width:] {
		r := rune(s[0])
		width = 1
		if r >= utf8.RuneSelf {
			r, width = utf8.DecodeRuneInString(s)
		}
		// A RuneError of width 1 marks an invalid UTF-8 byte: emit it as \xNN.
		if width == 1 && r == utf8.RuneError {
			buf = append(buf, `\x`...)
			buf = append(buf, lowerhex[s[0]>>4])
			buf = append(buf, lowerhex[s[0]&0xF])
			continue
		}
		buf = appendEscapedRune(buf, r, quote)
	}
	buf = append(buf, quote)
	return buf
}
|
||||
|
||||
// appendEscapedRune appends r to buf, escaped as needed: the quote byte and
// backslash are backslashed, printable runes are copied verbatim, known
// control characters use their mnemonic escapes, and everything else is
// emitted as \xNN, \uNNNN or \UNNNNNNNN depending on its value.
func appendEscapedRune(buf []byte, r rune, quote byte) []byte {
	var runeTmp [utf8.UTFMax]byte
	if r == rune(quote) || r == '\\' { // always backslashed
		buf = append(buf, '\\')
		buf = append(buf, byte(r))
		return buf
	}
	if unicode.IsPrint(r) {
		n := utf8.EncodeRune(runeTmp[:], r)
		buf = append(buf, runeTmp[:n]...)
		return buf
	}
	switch r {
	case '\a':
		buf = append(buf, `\a`...)
	case '\b':
		buf = append(buf, `\b`...)
	case '\f':
		buf = append(buf, `\f`...)
	case '\n':
		buf = append(buf, `\n`...)
	case '\r':
		buf = append(buf, `\r`...)
	case '\t':
		buf = append(buf, `\t`...)
	case '\v':
		buf = append(buf, `\v`...)
	default:
		switch {
		case r < ' ':
			// Other control characters: two-digit hex escape.
			buf = append(buf, `\x`...)
			buf = append(buf, lowerhex[byte(r)>>4])
			buf = append(buf, lowerhex[byte(r)&0xF])
		case r > utf8.MaxRune:
			// Clamp invalid runes to the replacement character.
			r = 0xFFFD
			fallthrough
		case r < 0x10000:
			buf = append(buf, `\u`...)
			for s := 12; s >= 0; s -= 4 {
				buf = append(buf, lowerhex[r>>uint(s)&0xF])
			}
		default:
			buf = append(buf, `\U`...)
			for s := 28; s >= 0; s -= 4 {
				buf = append(buf, lowerhex[r>>uint(s)&0xF])
			}
		}
	}
	return buf
}
|
@ -0,0 +1,130 @@
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
const (
	// StructTagName is the struct tag key read by Marshal/Unmarshal ("yaml").
	StructTagName = "yaml"
)
|
||||
|
||||
// StructField holds the parsed yaml/json tag information for a single
// struct field.
type StructField struct {
	FieldName    string // Go field name
	RenderName   string // name used in the rendered YAML (tag override or lowercased field name)
	AnchorName   string // explicit anchor name from "anchor=NAME"
	AliasName    string // explicit alias name from "alias=NAME"
	IsAutoAnchor bool   // "anchor" option without an explicit name
	IsAutoAlias  bool   // "alias" option without an explicit name
	IsOmitEmpty  bool   // "omitempty" option
	IsFlow       bool   // "flow" option
	IsInline     bool   // "inline" option
}
||||
|
||||
func getTag(field reflect.StructField) string {
|
||||
// If struct tag `yaml` exist, use that. If no `yaml`
|
||||
// exists, but `json` does, use that and try the best to
|
||||
// adhere to its rules
|
||||
tag := field.Tag.Get(StructTagName)
|
||||
if tag == "" {
|
||||
tag = field.Tag.Get(`json`)
|
||||
}
|
||||
return tag
|
||||
}
|
||||
|
||||
func structField(field reflect.StructField) *StructField {
|
||||
tag := getTag(field)
|
||||
fieldName := strings.ToLower(field.Name)
|
||||
options := strings.Split(tag, ",")
|
||||
if len(options) > 0 {
|
||||
if options[0] != "" {
|
||||
fieldName = options[0]
|
||||
}
|
||||
}
|
||||
structField := &StructField{
|
||||
FieldName: field.Name,
|
||||
RenderName: fieldName,
|
||||
}
|
||||
if len(options) > 1 {
|
||||
for _, opt := range options[1:] {
|
||||
switch {
|
||||
case opt == "omitempty":
|
||||
structField.IsOmitEmpty = true
|
||||
case opt == "flow":
|
||||
structField.IsFlow = true
|
||||
case opt == "inline":
|
||||
structField.IsInline = true
|
||||
case strings.HasPrefix(opt, "anchor"):
|
||||
anchor := strings.Split(opt, "=")
|
||||
if len(anchor) > 1 {
|
||||
structField.AnchorName = anchor[1]
|
||||
} else {
|
||||
structField.IsAutoAnchor = true
|
||||
}
|
||||
case strings.HasPrefix(opt, "alias"):
|
||||
alias := strings.Split(opt, "=")
|
||||
if len(alias) > 1 {
|
||||
structField.AliasName = alias[1]
|
||||
} else {
|
||||
structField.IsAutoAlias = true
|
||||
}
|
||||
default:
|
||||
}
|
||||
}
|
||||
}
|
||||
return structField
|
||||
}
|
||||
|
||||
func isIgnoredStructField(field reflect.StructField) bool {
|
||||
if field.PkgPath != "" && !field.Anonymous {
|
||||
// private field
|
||||
return true
|
||||
}
|
||||
tag := getTag(field)
|
||||
if tag == "-" {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type StructFieldMap map[string]*StructField
|
||||
|
||||
func (m StructFieldMap) isIncludedRenderName(name string) bool {
|
||||
for _, v := range m {
|
||||
if !v.IsInline && v.RenderName == name {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (m StructFieldMap) hasMergeProperty() bool {
|
||||
for _, v := range m {
|
||||
if v.IsOmitEmpty && v.IsInline && v.IsAutoAlias {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func structFieldMap(structType reflect.Type) (StructFieldMap, error) {
|
||||
structFieldMap := StructFieldMap{}
|
||||
renderNameMap := map[string]struct{}{}
|
||||
for i := 0; i < structType.NumField(); i++ {
|
||||
field := structType.Field(i)
|
||||
if isIgnoredStructField(field) {
|
||||
continue
|
||||
}
|
||||
structField := structField(field)
|
||||
if _, exists := renderNameMap[structField.RenderName]; exists {
|
||||
return nil, xerrors.Errorf("duplicated struct field name %s", structField.RenderName)
|
||||
}
|
||||
structFieldMap[structField.FieldName] = structField
|
||||
renderNameMap[structField.RenderName] = struct{}{}
|
||||
}
|
||||
return structFieldMap, nil
|
||||
}
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,13 @@
|
||||
package yaml
|
||||
|
||||
// StructValidator need to implement Struct method only
|
||||
// ( see https://pkg.go.dev/github.com/go-playground/validator/v10#Validate.Struct )
|
||||
type StructValidator interface {
|
||||
Struct(interface{}) error
|
||||
}
|
||||
|
||||
// FieldError need to implement StructField method only
|
||||
// ( see https://pkg.go.dev/github.com/go-playground/validator/v10#FieldError )
|
||||
type FieldError interface {
|
||||
StructField() string
|
||||
}
|
@ -0,0 +1,290 @@
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"io"
|
||||
"reflect"
|
||||
"sync"
|
||||
|
||||
"github.com/goccy/go-yaml/ast"
|
||||
"github.com/goccy/go-yaml/internal/errors"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// BytesMarshaler interface may be implemented by types to customize their
|
||||
// behavior when being marshaled into a YAML document. The returned value
|
||||
// is marshaled in place of the original value implementing Marshaler.
|
||||
//
|
||||
// If an error is returned by MarshalYAML, the marshaling procedure stops
|
||||
// and returns with the provided error.
|
||||
type BytesMarshaler interface {
|
||||
MarshalYAML() ([]byte, error)
|
||||
}
|
||||
|
||||
// BytesMarshalerContext interface use BytesMarshaler with context.Context.
|
||||
type BytesMarshalerContext interface {
|
||||
MarshalYAML(context.Context) ([]byte, error)
|
||||
}
|
||||
|
||||
// InterfaceMarshaler interface has MarshalYAML compatible with github.com/go-yaml/yaml package.
|
||||
type InterfaceMarshaler interface {
|
||||
MarshalYAML() (interface{}, error)
|
||||
}
|
||||
|
||||
// InterfaceMarshalerContext interface use InterfaceMarshaler with context.Context.
|
||||
type InterfaceMarshalerContext interface {
|
||||
MarshalYAML(context.Context) (interface{}, error)
|
||||
}
|
||||
|
||||
// BytesUnmarshaler interface may be implemented by types to customize their
|
||||
// behavior when being unmarshaled from a YAML document.
|
||||
type BytesUnmarshaler interface {
|
||||
UnmarshalYAML([]byte) error
|
||||
}
|
||||
|
||||
// BytesUnmarshalerContext interface use BytesUnmarshaler with context.Context.
|
||||
type BytesUnmarshalerContext interface {
|
||||
UnmarshalYAML(context.Context, []byte) error
|
||||
}
|
||||
|
||||
// InterfaceUnmarshaler interface has UnmarshalYAML compatible with github.com/go-yaml/yaml package.
|
||||
type InterfaceUnmarshaler interface {
|
||||
UnmarshalYAML(func(interface{}) error) error
|
||||
}
|
||||
|
||||
// InterfaceUnmarshalerContext interface use InterfaceUnmarshaler with context.Context.
|
||||
type InterfaceUnmarshalerContext interface {
|
||||
UnmarshalYAML(context.Context, func(interface{}) error) error
|
||||
}
|
||||
|
||||
// MapItem is an item in a MapSlice.
|
||||
type MapItem struct {
|
||||
Key, Value interface{}
|
||||
}
|
||||
|
||||
// MapSlice encodes and decodes as a YAML map.
|
||||
// The order of keys is preserved when encoding and decoding.
|
||||
type MapSlice []MapItem
|
||||
|
||||
// ToMap convert to map[interface{}]interface{}.
|
||||
func (s MapSlice) ToMap() map[interface{}]interface{} {
|
||||
v := map[interface{}]interface{}{}
|
||||
for _, item := range s {
|
||||
v[item.Key] = item.Value
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
// Marshal serializes the value provided into a YAML document. The structure
|
||||
// of the generated document will reflect the structure of the value itself.
|
||||
// Maps and pointers (to struct, string, int, etc) are accepted as the in value.
|
||||
//
|
||||
// Struct fields are only marshalled if they are exported (have an upper case
|
||||
// first letter), and are marshalled using the field name lowercased as the
|
||||
// default key. Custom keys may be defined via the "yaml" name in the field
|
||||
// tag: the content preceding the first comma is used as the key, and the
|
||||
// following comma-separated options are used to tweak the marshalling process.
|
||||
// Conflicting names result in a runtime error.
|
||||
//
|
||||
// The field tag format accepted is:
|
||||
//
|
||||
// `(...) yaml:"[<key>][,<flag1>[,<flag2>]]" (...)`
|
||||
//
|
||||
// The following flags are currently supported:
|
||||
//
|
||||
// omitempty Only include the field if it's not set to the zero
|
||||
// value for the type or to empty slices or maps.
|
||||
// Zero valued structs will be omitted if all their public
|
||||
// fields are zero, unless they implement an IsZero
|
||||
// method (see the IsZeroer interface type), in which
|
||||
// case the field will be included if that method returns true.
|
||||
//
|
||||
// flow Marshal using a flow style (useful for structs,
|
||||
// sequences and maps).
|
||||
//
|
||||
// inline Inline the field, which must be a struct or a map,
|
||||
// causing all of its fields or keys to be processed as if
|
||||
// they were part of the outer struct. For maps, keys must
|
||||
// not conflict with the yaml keys of other struct fields.
|
||||
//
|
||||
// anchor Marshal with anchor. If want to define anchor name explicitly, use anchor=name style.
|
||||
// Otherwise, if used 'anchor' name only, used the field name lowercased as the anchor name
|
||||
//
|
||||
// alias Marshal with alias. If want to define alias name explicitly, use alias=name style.
|
||||
// Otherwise, If omitted alias name and the field type is pointer type,
|
||||
// assigned anchor name automatically from same pointer address.
|
||||
//
|
||||
// In addition, if the key is "-", the field is ignored.
|
||||
//
|
||||
// For example:
|
||||
//
|
||||
// type T struct {
|
||||
// F int `yaml:"a,omitempty"`
|
||||
// B int
|
||||
// }
|
||||
// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n"
|
||||
// yaml.Marshal(&T{F: 1}) // Returns "a: 1\nb: 0\n"
|
||||
//
|
||||
func Marshal(v interface{}) ([]byte, error) {
|
||||
return MarshalWithOptions(v)
|
||||
}
|
||||
|
||||
// MarshalWithOptions serializes the value provided into a YAML document with EncodeOptions.
|
||||
func MarshalWithOptions(v interface{}, opts ...EncodeOption) ([]byte, error) {
|
||||
return MarshalContext(context.Background(), v, opts...)
|
||||
}
|
||||
|
||||
// MarshalContext serializes the value provided into a YAML document with context.Context and EncodeOptions.
|
||||
func MarshalContext(ctx context.Context, v interface{}, opts ...EncodeOption) ([]byte, error) {
|
||||
var buf bytes.Buffer
|
||||
if err := NewEncoder(&buf, opts...).EncodeContext(ctx, v); err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to marshal")
|
||||
}
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
|
||||
// ValueToNode convert from value to ast.Node.
|
||||
func ValueToNode(v interface{}, opts ...EncodeOption) (ast.Node, error) {
|
||||
var buf bytes.Buffer
|
||||
node, err := NewEncoder(&buf, opts...).EncodeToNode(v)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to convert value to node")
|
||||
}
|
||||
return node, nil
|
||||
}
|
||||
|
||||
// Unmarshal decodes the first document found within the in byte slice
|
||||
// and assigns decoded values into the out value.
|
||||
//
|
||||
// Struct fields are only unmarshalled if they are exported (have an
|
||||
// upper case first letter), and are unmarshalled using the field name
|
||||
// lowercased as the default key. Custom keys may be defined via the
|
||||
// "yaml" name in the field tag: the content preceding the first comma
|
||||
// is used as the key, and the following comma-separated options are
|
||||
// used to tweak the marshalling process (see Marshal).
|
||||
// Conflicting names result in a runtime error.
|
||||
//
|
||||
// For example:
|
||||
//
|
||||
// type T struct {
|
||||
// F int `yaml:"a,omitempty"`
|
||||
// B int
|
||||
// }
|
||||
// var t T
|
||||
// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t)
|
||||
//
|
||||
// See the documentation of Marshal for the format of tags and a list of
|
||||
// supported tag options.
|
||||
//
|
||||
func Unmarshal(data []byte, v interface{}) error {
|
||||
return UnmarshalWithOptions(data, v)
|
||||
}
|
||||
|
||||
// UnmarshalWithOptions decodes with DecodeOptions the first document found within the in byte slice
|
||||
// and assigns decoded values into the out value.
|
||||
func UnmarshalWithOptions(data []byte, v interface{}, opts ...DecodeOption) error {
|
||||
return UnmarshalContext(context.Background(), data, v, opts...)
|
||||
}
|
||||
|
||||
// UnmarshalContext decodes with context.Context and DecodeOptions.
|
||||
func UnmarshalContext(ctx context.Context, data []byte, v interface{}, opts ...DecodeOption) error {
|
||||
dec := NewDecoder(bytes.NewBuffer(data), opts...)
|
||||
if err := dec.DecodeContext(ctx, v); err != nil {
|
||||
if err == io.EOF {
|
||||
return nil
|
||||
}
|
||||
return errors.Wrapf(err, "failed to unmarshal")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// NodeToValue converts node to the value pointed to by v.
|
||||
func NodeToValue(node ast.Node, v interface{}, opts ...DecodeOption) error {
|
||||
var buf bytes.Buffer
|
||||
if err := NewDecoder(&buf, opts...).DecodeFromNode(node, v); err != nil {
|
||||
return errors.Wrapf(err, "failed to convert node to value")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// FormatError is a utility function that takes advantage of the metadata
|
||||
// stored in the errors returned by this package's parser.
|
||||
//
|
||||
// If the second argument `colored` is true, the error message is colorized.
|
||||
// If the third argument `inclSource` is true, the error message will
|
||||
// contain snippets of the YAML source that was used.
|
||||
func FormatError(e error, colored, inclSource bool) string {
|
||||
var pp errors.PrettyPrinter
|
||||
if xerrors.As(e, &pp) {
|
||||
var buf bytes.Buffer
|
||||
pp.PrettyPrint(&errors.Sink{&buf}, colored, inclSource)
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
return e.Error()
|
||||
}
|
||||
|
||||
// YAMLToJSON convert YAML bytes to JSON.
|
||||
func YAMLToJSON(bytes []byte) ([]byte, error) {
|
||||
var v interface{}
|
||||
if err := UnmarshalWithOptions(bytes, &v, UseOrderedMap()); err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to unmarshal")
|
||||
}
|
||||
out, err := MarshalWithOptions(v, JSON())
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to marshal with json option")
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
// JSONToYAML convert JSON bytes to YAML.
|
||||
func JSONToYAML(bytes []byte) ([]byte, error) {
|
||||
var v interface{}
|
||||
if err := UnmarshalWithOptions(bytes, &v, UseOrderedMap()); err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to unmarshal from json bytes")
|
||||
}
|
||||
out, err := Marshal(v)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to marshal")
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
var (
|
||||
globalCustomMarshalerMu sync.Mutex
|
||||
globalCustomUnmarshalerMu sync.Mutex
|
||||
globalCustomMarshalerMap = map[reflect.Type]func(interface{}) ([]byte, error){}
|
||||
globalCustomUnmarshalerMap = map[reflect.Type]func(interface{}, []byte) error{}
|
||||
)
|
||||
|
||||
// RegisterCustomMarshaler overrides any encoding process for the type specified in generics.
|
||||
// If you want to switch the behavior for each encoder, use `CustomMarshaler` defined as EncodeOption.
|
||||
//
|
||||
// NOTE: If type T implements MarshalYAML for pointer receiver, the type specified in RegisterCustomMarshaler must be *T.
|
||||
// If RegisterCustomMarshaler and CustomMarshaler of EncodeOption are specified for the same type,
|
||||
// the CustomMarshaler specified in EncodeOption takes precedence.
|
||||
func RegisterCustomMarshaler[T any](marshaler func(T) ([]byte, error)) {
|
||||
globalCustomMarshalerMu.Lock()
|
||||
defer globalCustomMarshalerMu.Unlock()
|
||||
|
||||
var typ T
|
||||
globalCustomMarshalerMap[reflect.TypeOf(typ)] = func(v interface{}) ([]byte, error) {
|
||||
return marshaler(v.(T))
|
||||
}
|
||||
}
|
||||
|
||||
// RegisterCustomUnmarshaler overrides any decoding process for the type specified in generics.
|
||||
// If you want to switch the behavior for each decoder, use `CustomUnmarshaler` defined as DecodeOption.
|
||||
//
|
||||
// NOTE: If RegisterCustomUnmarshaler and CustomUnmarshaler of DecodeOption are specified for the same type,
|
||||
// the CustomUnmarshaler specified in DecodeOption takes precedence.
|
||||
func RegisterCustomUnmarshaler[T any](unmarshaler func(*T, []byte) error) {
|
||||
globalCustomUnmarshalerMu.Lock()
|
||||
defer globalCustomUnmarshalerMu.Unlock()
|
||||
|
||||
var typ *T
|
||||
globalCustomUnmarshalerMap[reflect.TypeOf(typ)] = func(v interface{}, b []byte) error {
|
||||
return unmarshaler(v.(*T), b)
|
||||
}
|
||||
}
|
@ -1,9 +1,15 @@
|
||||
language: go
|
||||
sudo: false
|
||||
go:
|
||||
- 1.13.x
|
||||
- tip
|
||||
|
||||
before_install:
|
||||
- go get github.com/mattn/goveralls
|
||||
- go get golang.org/x/tools/cmd/cover
|
||||
- go get -t -v ./...
|
||||
|
||||
script:
|
||||
- $HOME/gopath/bin/goveralls -repotoken xnXqRGwgW3SXIguzxf90ZSK1GPYZPaGrw
|
||||
- ./go.test.sh
|
||||
|
||||
after_success:
|
||||
- bash <(curl -s https://codecov.io/bash)
|
||||
|
||||
|
@ -0,0 +1,12 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
echo "" > coverage.txt
|
||||
|
||||
for d in $(go list ./... | grep -v vendor); do
|
||||
go test -race -coverprofile=profile.out -covermode=atomic "$d"
|
||||
if [ -f profile.out ]; then
|
||||
cat profile.out >> coverage.txt
|
||||
rm profile.out
|
||||
fi
|
||||
done
|
@ -1,13 +1,14 @@
|
||||
language: go
|
||||
sudo: false
|
||||
go:
|
||||
- 1.13.x
|
||||
- tip
|
||||
|
||||
os:
|
||||
- linux
|
||||
- osx
|
||||
|
||||
before_install:
|
||||
- go get github.com/mattn/goveralls
|
||||
- go get golang.org/x/tools/cmd/cover
|
||||
- go get -t -v ./...
|
||||
|
||||
script:
|
||||
- $HOME/gopath/bin/goveralls -repotoken 3gHdORO5k5ziZcWMBxnd9LrMZaJs8m9x5
|
||||
- ./go.test.sh
|
||||
|
||||
after_success:
|
||||
- bash <(curl -s https://codecov.io/bash)
|
||||
|
@ -0,0 +1,12 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
echo "" > coverage.txt
|
||||
|
||||
for d in $(go list ./... | grep -v vendor); do
|
||||
go test -race -coverprofile=profile.out -covermode=atomic "$d"
|
||||
if [ -f profile.out ]; then
|
||||
cat profile.out >> coverage.txt
|
||||
rm profile.out
|
||||
fi
|
||||
done
|
@ -1,23 +0,0 @@
|
||||
// +build android
|
||||
|
||||
package isatty
|
||||
|
||||
import (
|
||||
"syscall"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
const ioctlReadTermios = syscall.TCGETS
|
||||
|
||||
// IsTerminal return true if the file descriptor is terminal.
|
||||
func IsTerminal(fd uintptr) bool {
|
||||
var termios syscall.Termios
|
||||
_, _, err := syscall.Syscall6(syscall.SYS_IOCTL, fd, ioctlReadTermios, uintptr(unsafe.Pointer(&termios)), 0, 0, 0)
|
||||
return err == 0
|
||||
}
|
||||
|
||||
// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2
|
||||
// terminal. This is also always false on this environment.
|
||||
func IsCygwinTerminal(fd uintptr) bool {
|
||||
return false
|
||||
}
|
@ -0,0 +1,8 @@
|
||||
{
|
||||
"extends": [
|
||||
"config:base"
|
||||
],
|
||||
"postUpdateOptions": [
|
||||
"gomodTidy"
|
||||
]
|
||||
}
|
@ -0,0 +1,30 @@
|
||||
// Copyright 2022 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
//go:build hurd
|
||||
// +build hurd
|
||||
|
||||
package unix
|
||||
|
||||
/*
|
||||
#include <stdint.h>
|
||||
int ioctl(int, unsigned long int, uintptr_t);
|
||||
*/
|
||||
import "C"
|
||||
|
||||
func ioctl(fd int, req uint, arg uintptr) (err error) {
|
||||
r0, er := C.ioctl(C.int(fd), C.ulong(req), C.uintptr_t(arg))
|
||||
if r0 == -1 && er != nil {
|
||||
err = er
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) {
|
||||
r0, er := C.ioctl(C.int(fd), C.ulong(req), C.uintptr_t(uintptr(arg)))
|
||||
if r0 == -1 && er != nil {
|
||||
err = er
|
||||
}
|
||||
return
|
||||
}
|
@ -0,0 +1,29 @@
|
||||
// Copyright 2022 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
//go:build 386 && hurd
|
||||
// +build 386,hurd
|
||||
|
||||
package unix
|
||||
|
||||
const (
|
||||
TIOCGETA = 0x62251713
|
||||
)
|
||||
|
||||
type Winsize struct {
|
||||
Row uint16
|
||||
Col uint16
|
||||
Xpixel uint16
|
||||
Ypixel uint16
|
||||
}
|
||||
|
||||
type Termios struct {
|
||||
Iflag uint32
|
||||
Oflag uint32
|
||||
Cflag uint32
|
||||
Lflag uint32
|
||||
Cc [20]uint8
|
||||
Ispeed int32
|
||||
Ospeed int32
|
||||
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue