Browse Source

Define script structure + run scripts

Brendan Abolivier 4 years ago
commit
8db62da8d4
Signed by: Brendan Abolivier <contact@brendanabolivier.com> GPG key ID: 8EF1500759F70623

+ 15
- 0
.editorconfig View File

@@ -0,0 +1,15 @@
1
+root = true
2
+
3
+[*]
4
+charset = utf-8
5
+
6
+end_of_line = lf
7
+insert_final_newline = false
8
+trim_trailing_whitespace = true
9
+
10
+[*.go]
11
+indent_style = tab
12
+indent_size = 4
13
+
14
+[*.{yml,yaml}]
15
+indent_style = space

+ 4
- 0
.gitignore View File

@@ -0,0 +1,4 @@
1
+bin
2
+pkg
3
+
4
+config.yaml

+ 21
- 0
config.sample.yaml View File

@@ -0,0 +1,21 @@
1
+warp10_exec: http://localhost/api/v0/exec
2
+token: READ_TOKEN
3
+scripts:
4
+    -   key: "temperature"
5
+        script: "
6
+            NOW 'now' STORE
7
+            [
8
+                $token
9
+                'sensores.temperature'
10
+                { 'room' 'bedroom' }
11
+                $now
12
+                -1
13
+            ] FETCH
14
+            VALUES LIST-> DROP LIST-> DROP
15
+            "
16
+        # bool | number
17
+        type: number
18
+        # only required if type == number
19
+        threshold: 50
20
+        action: http
21
+        target: http://localhost:9996/alert

+ 41
- 0
src/metrics-alerting/config/config.go View File

@@ -0,0 +1,41 @@
1
+package config
2
+
3
+import (
4
+	"io/ioutil"
5
+
6
+	"gopkg.in/yaml.v2"
7
+)
8
+
9
+type Script struct {
10
+	// An identifying key for the script
11
+	Key string `yaml:"key"`
12
+	// The script to run on Warp10
13
+	Script string `yaml:"script"`
14
+	// The type of the value returned by the script
15
+	Type string `yaml:"type"`
16
+	// Value above which an action is required
17
+	Threshold string `yaml:"threshold,omitempty"`
18
+	// The action to take (either "http" or "email")
19
+	Action string `yaml:"action"`
20
+	// The action's target
21
+	Target string `yaml:"target"`
22
+}
23
+
24
+type Config struct {
25
+	// Full URL to Warp10's /exec
26
+	Warp10Exec string `yaml:"warp10_exec"`
27
+	// Warp10 read token
28
+	ReadToken string `yaml:"token"`
29
+	// WarpScripts to run, with an identifier as its key
30
+	Scripts []Script `yaml:"scripts"`
31
+}
32
+
33
+func Load(filePath string) (cfg Config, err error) {
34
+	content, err := ioutil.ReadFile(filePath)
35
+	if err != nil {
36
+		return
37
+	}
38
+
39
+	err = yaml.Unmarshal(content, &cfg)
40
+	return
41
+}

+ 26
- 0
src/metrics-alerting/main.go View File

@@ -0,0 +1,26 @@
1
+package main
2
+
3
+import (
4
+	"flag"
5
+	"fmt"
6
+
7
+	"metrics-alerting/config"
8
+	"metrics-alerting/warp10"
9
+)
10
+
11
+var (
12
+	configPath = flag.String("config", "config.yaml", "The path to the config file. For more information, see the config file in this repository.")
13
+)
14
+
15
+func main() {
16
+	flag.Parse()
17
+
18
+	cfg, _ := config.Load(*configPath)
19
+	client := warp10.Warp10Client{
20
+		ExecEndpoint: cfg.Warp10Exec,
21
+		ReadToken:    cfg.ReadToken,
22
+	}
23
+
24
+	for _, script := range cfg.Scripts {
25
+	}
26
+}

+ 60
- 0
src/metrics-alerting/warp10/warp10.go View File

@@ -0,0 +1,60 @@
1
+package warp10
2
+
3
+import (
4
+	"encoding/json"
5
+	"fmt"
6
+	"net/http"
7
+	"strings"
8
+)
9
+
10
+type Warp10Client struct {
11
+	ExecEndpoint string
12
+	ReadToken    string
13
+}
14
+
15
+func (w *Warp10Client) ReadBool(script string) (b bool, err error) {
16
+	resp, err := w.sendRequest(script)
17
+	if err != nil {
18
+		return
19
+	}
20
+
21
+	var respBody []bool
22
+	if err = json.NewDecoder(resp.Body).Decode(&respBody); err != nil {
23
+		return
24
+	}
25
+
26
+	b = respBody[0]
27
+	return
28
+}
29
+
30
+func (w *Warp10Client) ReadFloat(script string) (f float64, err error) {
31
+	resp, err := w.sendRequest(script)
32
+	if err != nil {
33
+		return
34
+	}
35
+
36
+	var respBody []float64
37
+	if err = json.NewDecoder(resp.Body).Decode(&respBody); err != nil {
38
+		return
39
+	}
40
+
41
+	f = respBody[0]
42
+	return
43
+}
44
+
45
+func (w *Warp10Client) appendToken(script string) string {
46
+	return fmt.Sprintf("'%s' 'token' STORE\n%s", w.ReadToken, script)
47
+}
48
+
49
+func (w *Warp10Client) sendRequest(script string) (*http.Response, error) {
50
+	script = w.appendToken(script)
51
+
52
+	client := http.Client{}
53
+
54
+	req, err := http.NewRequest("POST", w.ExecEndpoint, strings.NewReader(script))
55
+	if err != nil {
56
+		return nil, err
57
+	}
58
+
59
+	return client.Do(req)
60
+}

+ 11
- 0
vendor/manifest View File

@@ -0,0 +1,11 @@
1
+{
2
+	"version": 0,
3
+	"dependencies": [
4
+		{
5
+			"importpath": "gopkg.in/yaml.v2",
6
+			"repository": "https://gopkg.in/yaml.v2",
7
+			"revision": "287cf08546ab5e7e37d55a84f7ed3fd1db036de5",
8
+			"branch": "v2"
9
+		}
10
+	]
11
+}

+ 201
- 0
vendor/src/gopkg.in/yaml.v2/LICENSE View File

@@ -0,0 +1,201 @@
1
+                                 Apache License
2
+                           Version 2.0, January 2004
3
+                        http://www.apache.org/licenses/
4
+
5
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+   1. Definitions.
8
+
9
+      "License" shall mean the terms and conditions for use, reproduction,
10
+      and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+      "Licensor" shall mean the copyright owner or entity authorized by
13
+      the copyright owner that is granting the License.
14
+
15
+      "Legal Entity" shall mean the union of the acting entity and all
16
+      other entities that control, are controlled by, or are under common
17
+      control with that entity. For the purposes of this definition,
18
+      "control" means (i) the power, direct or indirect, to cause the
19
+      direction or management of such entity, whether by contract or
20
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+      outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+      "You" (or "Your") shall mean an individual or Legal Entity
24
+      exercising permissions granted by this License.
25
+
26
+      "Source" form shall mean the preferred form for making modifications,
27
+      including but not limited to software source code, documentation
28
+      source, and configuration files.
29
+
30
+      "Object" form shall mean any form resulting from mechanical
31
+      transformation or translation of a Source form, including but
32
+      not limited to compiled object code, generated documentation,
33
+      and conversions to other media types.
34
+
35
+      "Work" shall mean the work of authorship, whether in Source or
36
+      Object form, made available under the License, as indicated by a
37
+      copyright notice that is included in or attached to the work
38
+      (an example is provided in the Appendix below).
39
+
40
+      "Derivative Works" shall mean any work, whether in Source or Object
41
+      form, that is based on (or derived from) the Work and for which the
42
+      editorial revisions, annotations, elaborations, or other modifications
43
+      represent, as a whole, an original work of authorship. For the purposes
44
+      of this License, Derivative Works shall not include works that remain
45
+      separable from, or merely link (or bind by name) to the interfaces of,
46
+      the Work and Derivative Works thereof.
47
+
48
+      "Contribution" shall mean any work of authorship, including
49
+      the original version of the Work and any modifications or additions
50
+      to that Work or Derivative Works thereof, that is intentionally
51
+      submitted to Licensor for inclusion in the Work by the copyright owner
52
+      or by an individual or Legal Entity authorized to submit on behalf of
53
+      the copyright owner. For the purposes of this definition, "submitted"
54
+      means any form of electronic, verbal, or written communication sent
55
+      to the Licensor or its representatives, including but not limited to
56
+      communication on electronic mailing lists, source code control systems,
57
+      and issue tracking systems that are managed by, or on behalf of, the
58
+      Licensor for the purpose of discussing and improving the Work, but
59
+      excluding communication that is conspicuously marked or otherwise
60
+      designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+      "Contributor" shall mean Licensor and any individual or Legal Entity
63
+      on behalf of whom a Contribution has been received by Licensor and
64
+      subsequently incorporated within the Work.
65
+
66
+   2. Grant of Copyright License. Subject to the terms and conditions of
67
+      this License, each Contributor hereby grants to You a perpetual,
68
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+      copyright license to reproduce, prepare Derivative Works of,
70
+      publicly display, publicly perform, sublicense, and distribute the
71
+      Work and such Derivative Works in Source or Object form.
72
+
73
+   3. Grant of Patent License. Subject to the terms and conditions of
74
+      this License, each Contributor hereby grants to You a perpetual,
75
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+      (except as stated in this section) patent license to make, have made,
77
+      use, offer to sell, sell, import, and otherwise transfer the Work,
78
+      where such license applies only to those patent claims licensable
79
+      by such Contributor that are necessarily infringed by their
80
+      Contribution(s) alone or by combination of their Contribution(s)
81
+      with the Work to which such Contribution(s) was submitted. If You
82
+      institute patent litigation against any entity (including a
83
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+      or a Contribution incorporated within the Work constitutes direct
85
+      or contributory patent infringement, then any patent licenses
86
+      granted to You under this License for that Work shall terminate
87
+      as of the date such litigation is filed.
88
+
89
+   4. Redistribution. You may reproduce and distribute copies of the
90
+      Work or Derivative Works thereof in any medium, with or without
91
+      modifications, and in Source or Object form, provided that You
92
+      meet the following conditions:
93
+
94
+      (a) You must give any other recipients of the Work or
95
+          Derivative Works a copy of this License; and
96
+
97
+      (b) You must cause any modified files to carry prominent notices
98
+          stating that You changed the files; and
99
+
100
+      (c) You must retain, in the Source form of any Derivative Works
101
+          that You distribute, all copyright, patent, trademark, and
102
+          attribution notices from the Source form of the Work,
103
+          excluding those notices that do not pertain to any part of
104
+          the Derivative Works; and
105
+
106
+      (d) If the Work includes a "NOTICE" text file as part of its
107
+          distribution, then any Derivative Works that You distribute must
108
+          include a readable copy of the attribution notices contained
109
+          within such NOTICE file, excluding those notices that do not
110
+          pertain to any part of the Derivative Works, in at least one
111
+          of the following places: within a NOTICE text file distributed
112
+          as part of the Derivative Works; within the Source form or
113
+          documentation, if provided along with the Derivative Works; or,
114
+          within a display generated by the Derivative Works, if and
115
+          wherever such third-party notices normally appear. The contents
116
+          of the NOTICE file are for informational purposes only and
117
+          do not modify the License. You may add Your own attribution
118
+          notices within Derivative Works that You distribute, alongside
119
+          or as an addendum to the NOTICE text from the Work, provided
120
+          that such additional attribution notices cannot be construed
121
+          as modifying the License.
122
+
123
+      You may add Your own copyright statement to Your modifications and
124
+      may provide additional or different license terms and conditions
125
+      for use, reproduction, or distribution of Your modifications, or
126
+      for any such Derivative Works as a whole, provided Your use,
127
+      reproduction, and distribution of the Work otherwise complies with
128
+      the conditions stated in this License.
129
+
130
+   5. Submission of Contributions. Unless You explicitly state otherwise,
131
+      any Contribution intentionally submitted for inclusion in the Work
132
+      by You to the Licensor shall be under the terms and conditions of
133
+      this License, without any additional terms or conditions.
134
+      Notwithstanding the above, nothing herein shall supersede or modify
135
+      the terms of any separate license agreement you may have executed
136
+      with Licensor regarding such Contributions.
137
+
138
+   6. Trademarks. This License does not grant permission to use the trade
139
+      names, trademarks, service marks, or product names of the Licensor,
140
+      except as required for reasonable and customary use in describing the
141
+      origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+   7. Disclaimer of Warranty. Unless required by applicable law or
144
+      agreed to in writing, Licensor provides the Work (and each
145
+      Contributor provides its Contributions) on an "AS IS" BASIS,
146
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+      implied, including, without limitation, any warranties or conditions
148
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+      PARTICULAR PURPOSE. You are solely responsible for determining the
150
+      appropriateness of using or redistributing the Work and assume any
151
+      risks associated with Your exercise of permissions under this License.
152
+
153
+   8. Limitation of Liability. In no event and under no legal theory,
154
+      whether in tort (including negligence), contract, or otherwise,
155
+      unless required by applicable law (such as deliberate and grossly
156
+      negligent acts) or agreed to in writing, shall any Contributor be
157
+      liable to You for damages, including any direct, indirect, special,
158
+      incidental, or consequential damages of any character arising as a
159
+      result of this License or out of the use or inability to use the
160
+      Work (including but not limited to damages for loss of goodwill,
161
+      work stoppage, computer failure or malfunction, or any and all
162
+      other commercial damages or losses), even if such Contributor
163
+      has been advised of the possibility of such damages.
164
+
165
+   9. Accepting Warranty or Additional Liability. While redistributing
166
+      the Work or Derivative Works thereof, You may choose to offer,
167
+      and charge a fee for, acceptance of support, warranty, indemnity,
168
+      or other liability obligations and/or rights consistent with this
169
+      License. However, in accepting such obligations, You may act only
170
+      on Your own behalf and on Your sole responsibility, not on behalf
171
+      of any other Contributor, and only if You agree to indemnify,
172
+      defend, and hold each Contributor harmless for any liability
173
+      incurred by, or claims asserted against, such Contributor by reason
174
+      of your accepting any such warranty or additional liability.
175
+
176
+   END OF TERMS AND CONDITIONS
177
+
178
+   APPENDIX: How to apply the Apache License to your work.
179
+
180
+      To apply the Apache License to your work, attach the following
181
+      boilerplate notice, with the fields enclosed by brackets "{}"
182
+      replaced with your own identifying information. (Don't include
183
+      the brackets!)  The text should be enclosed in the appropriate
184
+      comment syntax for the file format. We also recommend that a
185
+      file or class name and description of purpose be included on the
186
+      same "printed page" as the copyright notice for easier
187
+      identification within third-party archives.
188
+
189
+   Copyright {yyyy} {name of copyright owner}
190
+
191
+   Licensed under the Apache License, Version 2.0 (the "License");
192
+   you may not use this file except in compliance with the License.
193
+   You may obtain a copy of the License at
194
+
195
+       http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+   Unless required by applicable law or agreed to in writing, software
198
+   distributed under the License is distributed on an "AS IS" BASIS,
199
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+   See the License for the specific language governing permissions and
201
+   limitations under the License.

+ 31
- 0
vendor/src/gopkg.in/yaml.v2/LICENSE.libyaml View File

@@ -0,0 +1,31 @@
1
+The following files were ported to Go from C files of libyaml, and thus
2
+are still covered by their original copyright and license:
3
+
4
+    apic.go
5
+    emitterc.go
6
+    parserc.go
7
+    readerc.go
8
+    scannerc.go
9
+    writerc.go
10
+    yamlh.go
11
+    yamlprivateh.go
12
+
13
+Copyright (c) 2006 Kirill Simonov
14
+
15
+Permission is hereby granted, free of charge, to any person obtaining a copy of
16
+this software and associated documentation files (the "Software"), to deal in
17
+the Software without restriction, including without limitation the rights to
18
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
19
+of the Software, and to permit persons to whom the Software is furnished to do
20
+so, subject to the following conditions:
21
+
22
+The above copyright notice and this permission notice shall be included in all
23
+copies or substantial portions of the Software.
24
+
25
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
26
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
27
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
28
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
29
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
30
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
31
+SOFTWARE.

+ 133
- 0
vendor/src/gopkg.in/yaml.v2/README.md View File

@@ -0,0 +1,133 @@
1
+# YAML support for the Go language
2
+
3
+Introduction
4
+------------
5
+
6
+The yaml package enables Go programs to comfortably encode and decode YAML
7
+values. It was developed within [Canonical](https://www.canonical.com) as
8
+part of the [juju](https://juju.ubuntu.com) project, and is based on a
9
+pure Go port of the well-known [libyaml](http://pyyaml.org/wiki/LibYAML)
10
+C library to parse and generate YAML data quickly and reliably.
11
+
12
+Compatibility
13
+-------------
14
+
15
+The yaml package supports most of YAML 1.1 and 1.2, including support for
16
+anchors, tags, map merging, etc. Multi-document unmarshalling is not yet
17
+implemented, and base-60 floats from YAML 1.1 are purposefully not
18
+supported since they're a poor design and are gone in YAML 1.2.
19
+
20
+Installation and usage
21
+----------------------
22
+
23
+The import path for the package is *gopkg.in/yaml.v2*.
24
+
25
+To install it, run:
26
+
27
+    go get gopkg.in/yaml.v2
28
+
29
+API documentation
30
+-----------------
31
+
32
+If opened in a browser, the import path itself leads to the API documentation:
33
+
34
+  * [https://gopkg.in/yaml.v2](https://gopkg.in/yaml.v2)
35
+
36
+API stability
37
+-------------
38
+
39
+The package API for yaml v2 will remain stable as described in [gopkg.in](https://gopkg.in).
40
+
41
+
42
+License
43
+-------
44
+
45
+The yaml package is licensed under the Apache License 2.0. Please see the LICENSE file for details.
46
+
47
+
48
+Example
49
+-------
50
+
51
+Some more examples can be found in the "examples" folder.
52
+
53
+```Go
54
+package main
55
+
56
+import (
57
+        "fmt"
58
+        "log"
59
+
60
+        "gopkg.in/yaml.v2"
61
+)
62
+
63
+var data = `
64
+a: Easy!
65
+b:
66
+  c: 2
67
+  d: [3, 4]
68
+`
69
+
70
+type T struct {
71
+        A string
72
+        B struct {
73
+                RenamedC int   `yaml:"c"`
74
+                D        []int `yaml:",flow"`
75
+        }
76
+}
77
+
78
+func main() {
79
+        t := T{}
80
+    
81
+        err := yaml.Unmarshal([]byte(data), &t)
82
+        if err != nil {
83
+                log.Fatalf("error: %v", err)
84
+        }
85
+        fmt.Printf("--- t:\n%v\n\n", t)
86
+    
87
+        d, err := yaml.Marshal(&t)
88
+        if err != nil {
89
+                log.Fatalf("error: %v", err)
90
+        }
91
+        fmt.Printf("--- t dump:\n%s\n\n", string(d))
92
+    
93
+        m := make(map[interface{}]interface{})
94
+    
95
+        err = yaml.Unmarshal([]byte(data), &m)
96
+        if err != nil {
97
+                log.Fatalf("error: %v", err)
98
+        }
99
+        fmt.Printf("--- m:\n%v\n\n", m)
100
+    
101
+        d, err = yaml.Marshal(&m)
102
+        if err != nil {
103
+                log.Fatalf("error: %v", err)
104
+        }
105
+        fmt.Printf("--- m dump:\n%s\n\n", string(d))
106
+}
107
+```
108
+
109
+This example will generate the following output:
110
+
111
+```
112
+--- t:
113
+{Easy! {2 [3 4]}}
114
+
115
+--- t dump:
116
+a: Easy!
117
+b:
118
+  c: 2
119
+  d: [3, 4]
120
+
121
+
122
+--- m:
123
+map[a:Easy! b:map[c:2 d:[3 4]]]
124
+
125
+--- m dump:
126
+a: Easy!
127
+b:
128
+  c: 2
129
+  d:
130
+  - 3
131
+  - 4
132
+```
133
+

+ 742
- 0
vendor/src/gopkg.in/yaml.v2/apic.go View File

@@ -0,0 +1,742 @@
1
+package yaml
2
+
3
+import (
4
+	"io"
5
+	"os"
6
+)
7
+
8
+func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) {
9
+	//fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens))
10
+
11
+	// Check if we can move the queue at the beginning of the buffer.
12
+	if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) {
13
+		if parser.tokens_head != len(parser.tokens) {
14
+			copy(parser.tokens, parser.tokens[parser.tokens_head:])
15
+		}
16
+		parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head]
17
+		parser.tokens_head = 0
18
+	}
19
+	parser.tokens = append(parser.tokens, *token)
20
+	if pos < 0 {
21
+		return
22
+	}
23
+	copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:])
24
+	parser.tokens[parser.tokens_head+pos] = *token
25
+}
26
+
27
+// Create a new parser object.
28
+func yaml_parser_initialize(parser *yaml_parser_t) bool {
29
+	*parser = yaml_parser_t{
30
+		raw_buffer: make([]byte, 0, input_raw_buffer_size),
31
+		buffer:     make([]byte, 0, input_buffer_size),
32
+	}
33
+	return true
34
+}
35
+
36
+// Destroy a parser object.
37
+func yaml_parser_delete(parser *yaml_parser_t) {
38
+	*parser = yaml_parser_t{}
39
+}
40
+
41
+// String read handler.
42
+func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
43
+	if parser.input_pos == len(parser.input) {
44
+		return 0, io.EOF
45
+	}
46
+	n = copy(buffer, parser.input[parser.input_pos:])
47
+	parser.input_pos += n
48
+	return n, nil
49
+}
50
+
51
+// File read handler.
52
+func yaml_file_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
53
+	return parser.input_file.Read(buffer)
54
+}
55
+
56
+// Set a string input.
57
+func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) {
58
+	if parser.read_handler != nil {
59
+		panic("must set the input source only once")
60
+	}
61
+	parser.read_handler = yaml_string_read_handler
62
+	parser.input = input
63
+	parser.input_pos = 0
64
+}
65
+
66
+// Set a file input.
67
+func yaml_parser_set_input_file(parser *yaml_parser_t, file *os.File) {
68
+	if parser.read_handler != nil {
69
+		panic("must set the input source only once")
70
+	}
71
+	parser.read_handler = yaml_file_read_handler
72
+	parser.input_file = file
73
+}
74
+
75
+// Set the source encoding.
76
+func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) {
77
+	if parser.encoding != yaml_ANY_ENCODING {
78
+		panic("must set the encoding only once")
79
+	}
80
+	parser.encoding = encoding
81
+}
82
+
83
+// Create a new emitter object.
84
+func yaml_emitter_initialize(emitter *yaml_emitter_t) bool {
85
+	*emitter = yaml_emitter_t{
86
+		buffer:     make([]byte, output_buffer_size),
87
+		raw_buffer: make([]byte, 0, output_raw_buffer_size),
88
+		states:     make([]yaml_emitter_state_t, 0, initial_stack_size),
89
+		events:     make([]yaml_event_t, 0, initial_queue_size),
90
+	}
91
+	return true
92
+}
93
+
94
+// Destroy an emitter object.
95
+func yaml_emitter_delete(emitter *yaml_emitter_t) {
96
+	*emitter = yaml_emitter_t{}
97
+}
98
+
99
+// String write handler.
100
+func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
101
+	*emitter.output_buffer = append(*emitter.output_buffer, buffer...)
102
+	return nil
103
+}
104
+
105
+// File write handler.
106
+func yaml_file_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
107
+	_, err := emitter.output_file.Write(buffer)
108
+	return err
109
+}
110
+
111
+// Set a string output.
112
+func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) {
113
+	if emitter.write_handler != nil {
114
+		panic("must set the output target only once")
115
+	}
116
+	emitter.write_handler = yaml_string_write_handler
117
+	emitter.output_buffer = output_buffer
118
+}
119
+
120
+// Set a file output.
121
+func yaml_emitter_set_output_file(emitter *yaml_emitter_t, file io.Writer) {
122
+	if emitter.write_handler != nil {
123
+		panic("must set the output target only once")
124
+	}
125
+	emitter.write_handler = yaml_file_write_handler
126
+	emitter.output_file = file
127
+}
128
+
129
+// Set the output encoding.
130
+func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) {
131
+	if emitter.encoding != yaml_ANY_ENCODING {
132
+		panic("must set the output encoding only once")
133
+	}
134
+	emitter.encoding = encoding
135
+}
136
+
137
+// Set the canonical output style.
138
+func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) {
139
+	emitter.canonical = canonical
140
+}
141
+
142
+//// Set the indentation increment.
143
+func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) {
144
+	if indent < 2 || indent > 9 {
145
+		indent = 2
146
+	}
147
+	emitter.best_indent = indent
148
+}
149
+
150
+// Set the preferred line width.
151
+func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) {
152
+	if width < 0 {
153
+		width = -1
154
+	}
155
+	emitter.best_width = width
156
+}
157
+
158
+// Set if unescaped non-ASCII characters are allowed.
159
+func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) {
160
+	emitter.unicode = unicode
161
+}
162
+
163
+// Set the preferred line break character.
164
+func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) {
165
+	emitter.line_break = line_break
166
+}
167
+
168
+///*
169
+// * Destroy a token object.
170
+// */
171
+//
172
+//YAML_DECLARE(void)
173
+//yaml_token_delete(yaml_token_t *token)
174
+//{
175
+//    assert(token);  // Non-NULL token object expected.
176
+//
177
+//    switch (token.type)
178
+//    {
179
+//        case YAML_TAG_DIRECTIVE_TOKEN:
180
+//            yaml_free(token.data.tag_directive.handle);
181
+//            yaml_free(token.data.tag_directive.prefix);
182
+//            break;
183
+//
184
+//        case YAML_ALIAS_TOKEN:
185
+//            yaml_free(token.data.alias.value);
186
+//            break;
187
+//
188
+//        case YAML_ANCHOR_TOKEN:
189
+//            yaml_free(token.data.anchor.value);
190
+//            break;
191
+//
192
+//        case YAML_TAG_TOKEN:
193
+//            yaml_free(token.data.tag.handle);
194
+//            yaml_free(token.data.tag.suffix);
195
+//            break;
196
+//
197
+//        case YAML_SCALAR_TOKEN:
198
+//            yaml_free(token.data.scalar.value);
199
+//            break;
200
+//
201
+//        default:
202
+//            break;
203
+//    }
204
+//
205
+//    memset(token, 0, sizeof(yaml_token_t));
206
+//}
207
+//
208
+///*
209
+// * Check if a string is a valid UTF-8 sequence.
210
+// *
211
+// * Check 'reader.c' for more details on UTF-8 encoding.
212
+// */
213
+//
214
+//static int
215
+//yaml_check_utf8(yaml_char_t *start, size_t length)
216
+//{
217
+//    yaml_char_t *end = start+length;
218
+//    yaml_char_t *pointer = start;
219
+//
220
+//    while (pointer < end) {
221
+//        unsigned char octet;
222
+//        unsigned int width;
223
+//        unsigned int value;
224
+//        size_t k;
225
+//
226
+//        octet = pointer[0];
227
+//        width = (octet & 0x80) == 0x00 ? 1 :
228
+//                (octet & 0xE0) == 0xC0 ? 2 :
229
+//                (octet & 0xF0) == 0xE0 ? 3 :
230
+//                (octet & 0xF8) == 0xF0 ? 4 : 0;
231
+//        value = (octet & 0x80) == 0x00 ? octet & 0x7F :
232
+//                (octet & 0xE0) == 0xC0 ? octet & 0x1F :
233
+//                (octet & 0xF0) == 0xE0 ? octet & 0x0F :
234
+//                (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0;
235
+//        if (!width) return 0;
236
+//        if (pointer+width > end) return 0;
237
+//        for (k = 1; k < width; k ++) {
238
+//            octet = pointer[k];
239
+//            if ((octet & 0xC0) != 0x80) return 0;
240
+//            value = (value << 6) + (octet & 0x3F);
241
+//        }
242
+//        if (!((width == 1) ||
243
+//            (width == 2 && value >= 0x80) ||
244
+//            (width == 3 && value >= 0x800) ||
245
+//            (width == 4 && value >= 0x10000))) return 0;
246
+//
247
+//        pointer += width;
248
+//    }
249
+//
250
+//    return 1;
251
+//}
252
+//
253
+
254
+// Create STREAM-START.
255
+func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) bool {
256
+	*event = yaml_event_t{
257
+		typ:      yaml_STREAM_START_EVENT,
258
+		encoding: encoding,
259
+	}
260
+	return true
261
+}
262
+
263
+// Create STREAM-END.
264
+func yaml_stream_end_event_initialize(event *yaml_event_t) bool {
265
+	*event = yaml_event_t{
266
+		typ: yaml_STREAM_END_EVENT,
267
+	}
268
+	return true
269
+}
270
+
271
+// Create DOCUMENT-START.
272
+func yaml_document_start_event_initialize(event *yaml_event_t, version_directive *yaml_version_directive_t,
273
+	tag_directives []yaml_tag_directive_t, implicit bool) bool {
274
+	*event = yaml_event_t{
275
+		typ:               yaml_DOCUMENT_START_EVENT,
276
+		version_directive: version_directive,
277
+		tag_directives:    tag_directives,
278
+		implicit:          implicit,
279
+	}
280
+	return true
281
+}
282
+
283
+// Create DOCUMENT-END.
284
+func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) bool {
285
+	*event = yaml_event_t{
286
+		typ:      yaml_DOCUMENT_END_EVENT,
287
+		implicit: implicit,
288
+	}
289
+	return true
290
+}
291
+
292
+///*
293
+// * Create ALIAS.
294
+// */
295
+//
296
+//YAML_DECLARE(int)
297
+//yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t)
298
+//{
299
+//    mark yaml_mark_t = { 0, 0, 0 }
300
+//    anchor_copy *yaml_char_t = NULL
301
+//
302
+//    assert(event) // Non-NULL event object is expected.
303
+//    assert(anchor) // Non-NULL anchor is expected.
304
+//
305
+//    if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0
306
+//
307
+//    anchor_copy = yaml_strdup(anchor)
308
+//    if (!anchor_copy)
309
+//        return 0
310
+//
311
+//    ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark)
312
+//
313
+//    return 1
314
+//}
315
+
316
+// Create SCALAR.
317
+func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool {
318
+	*event = yaml_event_t{
319
+		typ:             yaml_SCALAR_EVENT,
320
+		anchor:          anchor,
321
+		tag:             tag,
322
+		value:           value,
323
+		implicit:        plain_implicit,
324
+		quoted_implicit: quoted_implicit,
325
+		style:           yaml_style_t(style),
326
+	}
327
+	return true
328
+}
329
+
330
+// Create SEQUENCE-START.
331
+func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool {
332
+	*event = yaml_event_t{
333
+		typ:      yaml_SEQUENCE_START_EVENT,
334
+		anchor:   anchor,
335
+		tag:      tag,
336
+		implicit: implicit,
337
+		style:    yaml_style_t(style),
338
+	}
339
+	return true
340
+}
341
+
342
+// Create SEQUENCE-END.
343
+func yaml_sequence_end_event_initialize(event *yaml_event_t) bool {
344
+	*event = yaml_event_t{
345
+		typ: yaml_SEQUENCE_END_EVENT,
346
+	}
347
+	return true
348
+}
349
+
350
+// Create MAPPING-START.
351
+func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) bool {
352
+	*event = yaml_event_t{
353
+		typ:      yaml_MAPPING_START_EVENT,
354
+		anchor:   anchor,
355
+		tag:      tag,
356
+		implicit: implicit,
357
+		style:    yaml_style_t(style),
358
+	}
359
+	return true
360
+}
361
+
362
+// Create MAPPING-END.
363
+func yaml_mapping_end_event_initialize(event *yaml_event_t) bool {
364
+	*event = yaml_event_t{
365
+		typ: yaml_MAPPING_END_EVENT,
366
+	}
367
+	return true
368
+}
369
+
370
+// Destroy an event object.
371
+func yaml_event_delete(event *yaml_event_t) {
372
+	*event = yaml_event_t{}
373
+}
374
+
375
+///*
376
+// * Create a document object.
377
+// */
378
+//
379
+//YAML_DECLARE(int)
380
+//yaml_document_initialize(document *yaml_document_t,
381
+//        version_directive *yaml_version_directive_t,
382
+//        tag_directives_start *yaml_tag_directive_t,
383
+//        tag_directives_end *yaml_tag_directive_t,
384
+//        start_implicit int, end_implicit int)
385
+//{
386
+//    struct {
387
+//        error yaml_error_type_t
388
+//    } context
389
+//    struct {
390
+//        start *yaml_node_t
391
+//        end *yaml_node_t
392
+//        top *yaml_node_t
393
+//    } nodes = { NULL, NULL, NULL }
394
+//    version_directive_copy *yaml_version_directive_t = NULL
395
+//    struct {
396
+//        start *yaml_tag_directive_t
397
+//        end *yaml_tag_directive_t
398
+//        top *yaml_tag_directive_t
399
+//    } tag_directives_copy = { NULL, NULL, NULL }
400
+//    value yaml_tag_directive_t = { NULL, NULL }
401
+//    mark yaml_mark_t = { 0, 0, 0 }
402
+//
403
+//    assert(document) // Non-NULL document object is expected.
404
+//    assert((tag_directives_start && tag_directives_end) ||
405
+//            (tag_directives_start == tag_directives_end))
406
+//                            // Valid tag directives are expected.
407
+//
408
+//    if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error
409
+//
410
+//    if (version_directive) {
411
+//        version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t))
412
+//        if (!version_directive_copy) goto error
413
+//        version_directive_copy.major = version_directive.major
414
+//        version_directive_copy.minor = version_directive.minor
415
+//    }
416
+//
417
+//    if (tag_directives_start != tag_directives_end) {
418
+//        tag_directive *yaml_tag_directive_t
419
+//        if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE))
420
+//            goto error
421
+//        for (tag_directive = tag_directives_start
422
+//                tag_directive != tag_directives_end; tag_directive ++) {
423
+//            assert(tag_directive.handle)
424
+//            assert(tag_directive.prefix)
425
+//            if (!yaml_check_utf8(tag_directive.handle,
426
+//                        strlen((char *)tag_directive.handle)))
427
+//                goto error
428
+//            if (!yaml_check_utf8(tag_directive.prefix,
429
+//                        strlen((char *)tag_directive.prefix)))
430
+//                goto error
431
+//            value.handle = yaml_strdup(tag_directive.handle)
432
+//            value.prefix = yaml_strdup(tag_directive.prefix)
433
+//            if (!value.handle || !value.prefix) goto error
434
+//            if (!PUSH(&context, tag_directives_copy, value))
435
+//                goto error
436
+//            value.handle = NULL
437
+//            value.prefix = NULL
438
+//        }
439
+//    }
440
+//
441
+//    DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy,
442
+//            tag_directives_copy.start, tag_directives_copy.top,
443
+//            start_implicit, end_implicit, mark, mark)
444
+//
445
+//    return 1
446
+//
447
+//error:
448
+//    STACK_DEL(&context, nodes)
449
+//    yaml_free(version_directive_copy)
450
+//    while (!STACK_EMPTY(&context, tag_directives_copy)) {
451
+//        value yaml_tag_directive_t = POP(&context, tag_directives_copy)
452
+//        yaml_free(value.handle)
453
+//        yaml_free(value.prefix)
454
+//    }
455
+//    STACK_DEL(&context, tag_directives_copy)
456
+//    yaml_free(value.handle)
457
+//    yaml_free(value.prefix)
458
+//
459
+//    return 0
460
+//}
461
+//
462
+///*
463
+// * Destroy a document object.
464
+// */
465
+//
466
+//YAML_DECLARE(void)
467
+//yaml_document_delete(document *yaml_document_t)
468
+//{
469
+//    struct {
470
+//        error yaml_error_type_t
471
+//    } context
472
+//    tag_directive *yaml_tag_directive_t
473
+//
474
+//    context.error = YAML_NO_ERROR // Eliminate a compliler warning.
475
+//
476
+//    assert(document) // Non-NULL document object is expected.
477
+//
478
+//    while (!STACK_EMPTY(&context, document.nodes)) {
479
+//        node yaml_node_t = POP(&context, document.nodes)
480
+//        yaml_free(node.tag)
481
+//        switch (node.type) {
482
+//            case YAML_SCALAR_NODE:
483
+//                yaml_free(node.data.scalar.value)
484
+//                break
485
+//            case YAML_SEQUENCE_NODE:
486
+//                STACK_DEL(&context, node.data.sequence.items)
487
+//                break
488
+//            case YAML_MAPPING_NODE:
489
+//                STACK_DEL(&context, node.data.mapping.pairs)
490
+//                break
491
+//            default:
492
+//                assert(0) // Should not happen.
493
+//        }
494
+//    }
495
+//    STACK_DEL(&context, document.nodes)
496
+//
497
+//    yaml_free(document.version_directive)
498
+//    for (tag_directive = document.tag_directives.start
499
+//            tag_directive != document.tag_directives.end
500
+//            tag_directive++) {
501
+//        yaml_free(tag_directive.handle)
502
+//        yaml_free(tag_directive.prefix)
503
+//    }
504
+//    yaml_free(document.tag_directives.start)
505
+//
506
+//    memset(document, 0, sizeof(yaml_document_t))
507
+//}
508
+//
509
+///**
510
+// * Get a document node.
511
+// */
512
+//
513
+//YAML_DECLARE(yaml_node_t *)
514
+//yaml_document_get_node(document *yaml_document_t, index int)
515
+//{
516
+//    assert(document) // Non-NULL document object is expected.
517
+//
518
+//    if (index > 0 && document.nodes.start + index <= document.nodes.top) {
519
+//        return document.nodes.start + index - 1
520
+//    }
521
+//    return NULL
522
+//}
523
+//
524
+///**
525
+// * Get the root object.
526
+// */
527
+//
528
+//YAML_DECLARE(yaml_node_t *)
529
+//yaml_document_get_root_node(document *yaml_document_t)
530
+//{
531
+//    assert(document) // Non-NULL document object is expected.
532
+//
533
+//    if (document.nodes.top != document.nodes.start) {
534
+//        return document.nodes.start
535
+//    }
536
+//    return NULL
537
+//}
538
+//
539
+///*
540
+// * Add a scalar node to a document.
541
+// */
542
+//
543
+//YAML_DECLARE(int)
544
+//yaml_document_add_scalar(document *yaml_document_t,
545
+//        tag *yaml_char_t, value *yaml_char_t, length int,
546
+//        style yaml_scalar_style_t)
547
+//{
548
+//    struct {
549
+//        error yaml_error_type_t
550
+//    } context
551
+//    mark yaml_mark_t = { 0, 0, 0 }
552
+//    tag_copy *yaml_char_t = NULL
553
+//    value_copy *yaml_char_t = NULL
554
+//    node yaml_node_t
555
+//
556
+//    assert(document) // Non-NULL document object is expected.
557
+//    assert(value) // Non-NULL value is expected.
558
+//
559
+//    if (!tag) {
560
+//        tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG
561
+//    }
562
+//
563
+//    if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
564
+//    tag_copy = yaml_strdup(tag)
565
+//    if (!tag_copy) goto error
566
+//
567
+//    if (length < 0) {
568
+//        length = strlen((char *)value)
569
+//    }
570
+//
571
+//    if (!yaml_check_utf8(value, length)) goto error
572
+//    value_copy = yaml_malloc(length+1)
573
+//    if (!value_copy) goto error
574
+//    memcpy(value_copy, value, length)
575
+//    value_copy[length] = '\0'
576
+//
577
+//    SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark)
578
+//    if (!PUSH(&context, document.nodes, node)) goto error
579
+//
580
+//    return document.nodes.top - document.nodes.start
581
+//
582
+//error:
583
+//    yaml_free(tag_copy)
584
+//    yaml_free(value_copy)
585
+//
586
+//    return 0
587
+//}
588
+//
589
+///*
590
+// * Add a sequence node to a document.
591
+// */
592
+//
593
+//YAML_DECLARE(int)
594
+//yaml_document_add_sequence(document *yaml_document_t,
595
+//        tag *yaml_char_t, style yaml_sequence_style_t)
596
+//{
597
+//    struct {
598
+//        error yaml_error_type_t
599
+//    } context
600
+//    mark yaml_mark_t = { 0, 0, 0 }
601
+//    tag_copy *yaml_char_t = NULL
602
+//    struct {
603
+//        start *yaml_node_item_t
604
+//        end *yaml_node_item_t
605
+//        top *yaml_node_item_t
606
+//    } items = { NULL, NULL, NULL }
607
+//    node yaml_node_t
608
+//
609
+//    assert(document) // Non-NULL document object is expected.
610
+//
611
+//    if (!tag) {
612
+//        tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG
613
+//    }
614
+//
615
+//    if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
616
+//    tag_copy = yaml_strdup(tag)
617
+//    if (!tag_copy) goto error
618
+//
619
+//    if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error
620
+//
621
+//    SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end,
622
+//            style, mark, mark)
623
+//    if (!PUSH(&context, document.nodes, node)) goto error
624
+//
625
+//    return document.nodes.top - document.nodes.start
626
+//
627
+//error:
628
+//    STACK_DEL(&context, items)
629
+//    yaml_free(tag_copy)
630
+//
631
+//    return 0
632
+//}
633
+//
634
+///*
635
+// * Add a mapping node to a document.
636
+// */
637
+//
638
+//YAML_DECLARE(int)
639
+//yaml_document_add_mapping(document *yaml_document_t,
640
+//        tag *yaml_char_t, style yaml_mapping_style_t)
641
+//{
642
+//    struct {
643
+//        error yaml_error_type_t
644
+//    } context
645
+//    mark yaml_mark_t = { 0, 0, 0 }
646
+//    tag_copy *yaml_char_t = NULL
647
+//    struct {
648
+//        start *yaml_node_pair_t
649
+//        end *yaml_node_pair_t
650
+//        top *yaml_node_pair_t
651
+//    } pairs = { NULL, NULL, NULL }
652
+//    node yaml_node_t
653
+//
654
+//    assert(document) // Non-NULL document object is expected.
655
+//
656
+//    if (!tag) {
657
+//        tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG
658
+//    }
659
+//
660
+//    if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
661
+//    tag_copy = yaml_strdup(tag)
662
+//    if (!tag_copy) goto error
663
+//
664
+//    if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error
665
+//
666
+//    MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end,
667
+//            style, mark, mark)
668
+//    if (!PUSH(&context, document.nodes, node)) goto error
669
+//
670
+//    return document.nodes.top - document.nodes.start
671
+//
672
+//error:
673
+//    STACK_DEL(&context, pairs)
674
+//    yaml_free(tag_copy)
675
+//
676
+//    return 0
677
+//}
678
+//
679
+///*
680
+// * Append an item to a sequence node.
681
+// */
682
+//
683
+//YAML_DECLARE(int)
684
+//yaml_document_append_sequence_item(document *yaml_document_t,
685
+//        sequence int, item int)
686
+//{
687
+//    struct {
688
+//        error yaml_error_type_t
689
+//    } context
690
+//
691
+//    assert(document) // Non-NULL document is required.
692
+//    assert(sequence > 0
693
+//            && document.nodes.start + sequence <= document.nodes.top)
694
+//                            // Valid sequence id is required.
695
+//    assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE)
696
+//                            // A sequence node is required.
697
+//    assert(item > 0 && document.nodes.start + item <= document.nodes.top)
698
+//                            // Valid item id is required.
699
+//
700
+//    if (!PUSH(&context,
701
+//                document.nodes.start[sequence-1].data.sequence.items, item))
702
+//        return 0
703
+//
704
+//    return 1
705
+//}
706
+//
707
+///*
708
+// * Append a pair of a key and a value to a mapping node.
709
+// */
710
+//
711
+//YAML_DECLARE(int)
712
+//yaml_document_append_mapping_pair(document *yaml_document_t,
713
+//        mapping int, key int, value int)
714
+//{
715
+//    struct {
716
+//        error yaml_error_type_t
717
+//    } context
718
+//
719
+//    pair yaml_node_pair_t
720
+//
721
+//    assert(document) // Non-NULL document is required.
722
+//    assert(mapping > 0
723
+//            && document.nodes.start + mapping <= document.nodes.top)
724
+//                            // Valid mapping id is required.
725
+//    assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE)
726
+//                            // A mapping node is required.
727
+//    assert(key > 0 && document.nodes.start + key <= document.nodes.top)
728
+//                            // Valid key id is required.
729
+//    assert(value > 0 && document.nodes.start + value <= document.nodes.top)
730
+//                            // Valid value id is required.
731
+//
732
+//    pair.key = key
733
+//    pair.value = value
734
+//
735
+//    if (!PUSH(&context,
736
+//                document.nodes.start[mapping-1].data.mapping.pairs, pair))
737
+//        return 0
738
+//
739
+//    return 1
740
+//}
741
+//
742
+//

+ 685
- 0
vendor/src/gopkg.in/yaml.v2/decode.go View File

@@ -0,0 +1,685 @@
1
+package yaml
2
+
3
+import (
4
+	"encoding"
5
+	"encoding/base64"
6
+	"fmt"
7
+	"math"
8
+	"reflect"
9
+	"strconv"
10
+	"time"
11
+)
12
+
13
+const (
14
+	documentNode = 1 << iota
15
+	mappingNode
16
+	sequenceNode
17
+	scalarNode
18
+	aliasNode
19
+)
20
+
21
+type node struct {
22
+	kind         int
23
+	line, column int
24
+	tag          string
25
+	value        string
26
+	implicit     bool
27
+	children     []*node
28
+	anchors      map[string]*node
29
+}
30
+
31
+// ----------------------------------------------------------------------------
32
+// Parser, produces a node tree out of a libyaml event stream.
33
+
34
+type parser struct {
35
+	parser yaml_parser_t
36
+	event  yaml_event_t
37
+	doc    *node
38
+}
39
+
40
+func newParser(b []byte) *parser {
41
+	p := parser{}
42
+	if !yaml_parser_initialize(&p.parser) {
43
+		panic("failed to initialize YAML emitter")
44
+	}
45
+
46
+	if len(b) == 0 {
47
+		b = []byte{'\n'}
48
+	}
49
+
50
+	yaml_parser_set_input_string(&p.parser, b)
51
+
52
+	p.skip()
53
+	if p.event.typ != yaml_STREAM_START_EVENT {
54
+		panic("expected stream start event, got " + strconv.Itoa(int(p.event.typ)))
55
+	}
56
+	p.skip()
57
+	return &p
58
+}
59
+
60
+func (p *parser) destroy() {
61
+	if p.event.typ != yaml_NO_EVENT {
62
+		yaml_event_delete(&p.event)
63
+	}
64
+	yaml_parser_delete(&p.parser)
65
+}
66
+
67
+func (p *parser) skip() {
68
+	if p.event.typ != yaml_NO_EVENT {
69
+		if p.event.typ == yaml_STREAM_END_EVENT {
70
+			failf("attempted to go past the end of stream; corrupted value?")
71
+		}
72
+		yaml_event_delete(&p.event)
73
+	}
74
+	if !yaml_parser_parse(&p.parser, &p.event) {
75
+		p.fail()
76
+	}
77
+}
78
+
79
+func (p *parser) fail() {
80
+	var where string
81
+	var line int
82
+	if p.parser.problem_mark.line != 0 {
83
+		line = p.parser.problem_mark.line
84
+	} else if p.parser.context_mark.line != 0 {
85
+		line = p.parser.context_mark.line
86
+	}
87
+	if line != 0 {
88
+		where = "line " + strconv.Itoa(line) + ": "
89
+	}
90
+	var msg string
91
+	if len(p.parser.problem) > 0 {
92
+		msg = p.parser.problem
93
+	} else {
94
+		msg = "unknown problem parsing YAML content"
95
+	}
96
+	failf("%s%s", where, msg)
97
+}
98
+
99
+func (p *parser) anchor(n *node, anchor []byte) {
100
+	if anchor != nil {
101
+		p.doc.anchors[string(anchor)] = n
102
+	}
103
+}
104
+
105
+func (p *parser) parse() *node {
106
+	switch p.event.typ {
107
+	case yaml_SCALAR_EVENT:
108
+		return p.scalar()
109
+	case yaml_ALIAS_EVENT:
110
+		return p.alias()
111
+	case yaml_MAPPING_START_EVENT:
112
+		return p.mapping()
113
+	case yaml_SEQUENCE_START_EVENT:
114
+		return p.sequence()
115
+	case yaml_DOCUMENT_START_EVENT:
116
+		return p.document()
117
+	case yaml_STREAM_END_EVENT:
118
+		// Happens when attempting to decode an empty buffer.
119
+		return nil
120
+	default:
121
+		panic("attempted to parse unknown event: " + strconv.Itoa(int(p.event.typ)))
122
+	}
123
+}
124
+
125
+func (p *parser) node(kind int) *node {
126
+	return &node{
127
+		kind:   kind,
128
+		line:   p.event.start_mark.line,
129
+		column: p.event.start_mark.column,
130
+	}
131
+}
132
+
133
+func (p *parser) document() *node {
134
+	n := p.node(documentNode)
135
+	n.anchors = make(map[string]*node)
136
+	p.doc = n
137
+	p.skip()
138
+	n.children = append(n.children, p.parse())
139
+	if p.event.typ != yaml_DOCUMENT_END_EVENT {
140
+		panic("expected end of document event but got " + strconv.Itoa(int(p.event.typ)))
141
+	}
142
+	p.skip()
143
+	return n
144
+}
145
+
146
+func (p *parser) alias() *node {
147
+	n := p.node(aliasNode)
148
+	n.value = string(p.event.anchor)
149
+	p.skip()
150
+	return n
151
+}
152
+
153
+func (p *parser) scalar() *node {
154
+	n := p.node(scalarNode)
155
+	n.value = string(p.event.value)
156
+	n.tag = string(p.event.tag)
157
+	n.implicit = p.event.implicit
158
+	p.anchor(n, p.event.anchor)
159
+	p.skip()
160
+	return n
161
+}
162
+
163
+func (p *parser) sequence() *node {
164
+	n := p.node(sequenceNode)
165
+	p.anchor(n, p.event.anchor)
166
+	p.skip()
167
+	for p.event.typ != yaml_SEQUENCE_END_EVENT {
168
+		n.children = append(n.children, p.parse())
169
+	}
170
+	p.skip()
171
+	return n
172
+}
173
+
174
+func (p *parser) mapping() *node {
175
+	n := p.node(mappingNode)
176
+	p.anchor(n, p.event.anchor)
177
+	p.skip()
178
+	for p.event.typ != yaml_MAPPING_END_EVENT {
179
+		n.children = append(n.children, p.parse(), p.parse())
180
+	}
181
+	p.skip()
182
+	return n
183
+}
184
+
185
+// ----------------------------------------------------------------------------
186
+// Decoder, unmarshals a node into a provided value.
187
+
188
+type decoder struct {
189
+	doc     *node
190
+	aliases map[string]bool
191
+	mapType reflect.Type
192
+	terrors []string
193
+	strict  bool
194
+}
195
+
196
+var (
197
+	mapItemType    = reflect.TypeOf(MapItem{})
198
+	durationType   = reflect.TypeOf(time.Duration(0))
199
+	defaultMapType = reflect.TypeOf(map[interface{}]interface{}{})
200
+	ifaceType      = defaultMapType.Elem()
201
+)
202
+
203
+func newDecoder(strict bool) *decoder {
204
+	d := &decoder{mapType: defaultMapType, strict: strict}
205
+	d.aliases = make(map[string]bool)
206
+	return d
207
+}
208
+
209
+func (d *decoder) terror(n *node, tag string, out reflect.Value) {
210
+	if n.tag != "" {
211
+		tag = n.tag
212
+	}
213
+	value := n.value
214
+	if tag != yaml_SEQ_TAG && tag != yaml_MAP_TAG {
215
+		if len(value) > 10 {
216
+			value = " `" + value[:7] + "...`"
217
+		} else {
218
+			value = " `" + value + "`"
219
+		}
220
+	}
221
+	d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.line+1, shortTag(tag), value, out.Type()))
222
+}
223
+
224
+func (d *decoder) callUnmarshaler(n *node, u Unmarshaler) (good bool) {
225
+	terrlen := len(d.terrors)
226
+	err := u.UnmarshalYAML(func(v interface{}) (err error) {
227
+		defer handleErr(&err)
228
+		d.unmarshal(n, reflect.ValueOf(v))
229
+		if len(d.terrors) > terrlen {
230
+			issues := d.terrors[terrlen:]
231
+			d.terrors = d.terrors[:terrlen]
232
+			return &TypeError{issues}
233
+		}
234
+		return nil
235
+	})
236
+	if e, ok := err.(*TypeError); ok {
237
+		d.terrors = append(d.terrors, e.Errors...)
238
+		return false
239
+	}
240
+	if err != nil {
241
+		fail(err)
242
+	}
243
+	return true
244
+}
245
+
246
+// d.prepare initializes and dereferences pointers and calls UnmarshalYAML
247
+// if a value is found to implement it.
248
+// It returns the initialized and dereferenced out value, whether
249
+// unmarshalling was already done by UnmarshalYAML, and if so whether
250
+// its types unmarshalled appropriately.
251
+//
252
+// If n holds a null value, prepare returns before doing anything.
253
+func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) {
254
+	if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "~" || n.value == "" && n.implicit) {
255
+		return out, false, false
256
+	}
257
+	again := true
258
+	for again {
259
+		again = false
260
+		if out.Kind() == reflect.Ptr {
261
+			if out.IsNil() {
262
+				out.Set(reflect.New(out.Type().Elem()))
263
+			}
264
+			out = out.Elem()
265
+			again = true
266
+		}
267
+		if out.CanAddr() {
268
+			if u, ok := out.Addr().Interface().(Unmarshaler); ok {
269
+				good = d.callUnmarshaler(n, u)
270
+				return out, true, good
271
+			}
272
+		}
273
+	}
274
+	return out, false, false
275
+}
276
+
277
+func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) {
278
+	switch n.kind {
279
+	case documentNode:
280
+		return d.document(n, out)
281
+	case aliasNode:
282
+		return d.alias(n, out)
283
+	}
284
+	out, unmarshaled, good := d.prepare(n, out)
285
+	if unmarshaled {
286
+		return good
287
+	}
288
+	switch n.kind {
289
+	case scalarNode:
290
+		good = d.scalar(n, out)
291
+	case mappingNode:
292
+		good = d.mapping(n, out)
293
+	case sequenceNode:
294
+		good = d.sequence(n, out)
295
+	default:
296
+		panic("internal error: unknown node kind: " + strconv.Itoa(n.kind))
297
+	}
298
+	return good
299
+}
300
+
301
+func (d *decoder) document(n *node, out reflect.Value) (good bool) {
302
+	if len(n.children) == 1 {
303
+		d.doc = n
304
+		d.unmarshal(n.children[0], out)
305
+		return true
306
+	}
307
+	return false
308
+}
309
+
310
+func (d *decoder) alias(n *node, out reflect.Value) (good bool) {
311
+	an, ok := d.doc.anchors[n.value]
312
+	if !ok {
313
+		failf("unknown anchor '%s' referenced", n.value)
314
+	}
315
+	if d.aliases[n.value] {
316
+		failf("anchor '%s' value contains itself", n.value)
317
+	}
318
+	d.aliases[n.value] = true
319
+	good = d.unmarshal(an, out)
320
+	delete(d.aliases, n.value)
321
+	return good
322
+}
323
+
324
+var zeroValue reflect.Value
325
+
326
+func resetMap(out reflect.Value) {
327
+	for _, k := range out.MapKeys() {
328
+		out.SetMapIndex(k, zeroValue)
329
+	}
330
+}
331
+
332
+func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
333
+	var tag string
334
+	var resolved interface{}
335
+	if n.tag == "" && !n.implicit {
336
+		tag = yaml_STR_TAG
337
+		resolved = n.value
338
+	} else {
339
+		tag, resolved = resolve(n.tag, n.value)
340
+		if tag == yaml_BINARY_TAG {
341
+			data, err := base64.StdEncoding.DecodeString(resolved.(string))
342
+			if err != nil {
343
+				failf("!!binary value contains invalid base64 data")
344
+			}
345
+			resolved = string(data)
346
+		}
347
+	}
348
+	if resolved == nil {
349
+		if out.Kind() == reflect.Map && !out.CanAddr() {
350
+			resetMap(out)
351
+		} else {
352
+			out.Set(reflect.Zero(out.Type()))
353
+		}
354
+		return true
355
+	}
356
+	if s, ok := resolved.(string); ok && out.CanAddr() {
357
+		if u, ok := out.Addr().Interface().(encoding.TextUnmarshaler); ok {
358
+			err := u.UnmarshalText([]byte(s))
359
+			if err != nil {
360
+				fail(err)
361
+			}
362
+			return true
363
+		}
364
+	}
365
+	switch out.Kind() {
366
+	case reflect.String:
367
+		if tag == yaml_BINARY_TAG {
368
+			out.SetString(resolved.(string))
369
+			good = true
370
+		} else if resolved != nil {
371
+			out.SetString(n.value)
372
+			good = true
373
+		}
374
+	case reflect.Interface:
375
+		if resolved == nil {
376
+			out.Set(reflect.Zero(out.Type()))
377
+		} else {
378
+			out.Set(reflect.ValueOf(resolved))
379
+		}
380
+		good = true
381
+	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
382
+		switch resolved := resolved.(type) {
383
+		case int:
384
+			if !out.OverflowInt(int64(resolved)) {
385
+				out.SetInt(int64(resolved))
386
+				good = true
387
+			}
388
+		case int64:
389
+			if !out.OverflowInt(resolved) {
390
+				out.SetInt(resolved)
391
+				good = true
392
+			}
393
+		case uint64:
394
+			if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
395
+				out.SetInt(int64(resolved))
396
+				good = true
397
+			}
398
+		case float64:
399
+			if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
400
+				out.SetInt(int64(resolved))
401
+				good = true
402
+			}
403
+		case string:
404
+			if out.Type() == durationType {
405
+				d, err := time.ParseDuration(resolved)
406
+				if err == nil {
407
+					out.SetInt(int64(d))
408
+					good = true
409
+				}
410
+			}
411
+		}
412
+	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
413
+		switch resolved := resolved.(type) {
414
+		case int:
415
+			if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
416
+				out.SetUint(uint64(resolved))
417
+				good = true
418
+			}
419
+		case int64:
420
+			if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
421
+				out.SetUint(uint64(resolved))
422
+				good = true
423
+			}
424
+		case uint64:
425
+			if !out.OverflowUint(uint64(resolved)) {
426
+				out.SetUint(uint64(resolved))
427
+				good = true
428
+			}
429
+		case float64:
430
+			if resolved <= math.MaxUint64 && !out.OverflowUint(uint64(resolved)) {
431
+				out.SetUint(uint64(resolved))
432
+				good = true
433
+			}
434
+		}
435
+	case reflect.Bool:
436
+		switch resolved := resolved.(type) {
437
+		case bool:
438
+			out.SetBool(resolved)
439
+			good = true
440
+		}
441
+	case reflect.Float32, reflect.Float64:
442
+		switch resolved := resolved.(type) {
443
+		case int:
444
+			out.SetFloat(float64(resolved))
445
+			good = true
446
+		case int64:
447
+			out.SetFloat(float64(resolved))
448
+			good = true
449
+		case uint64:
450
+			out.SetFloat(float64(resolved))
451
+			good = true
452
+		case float64:
453
+			out.SetFloat(resolved)
454
+			good = true
455
+		}
456
+	case reflect.Ptr:
457
+		if out.Type().Elem() == reflect.TypeOf(resolved) {
458
+			// TODO Does this make sense? When is out a Ptr except when decoding a nil value?
459
+			elem := reflect.New(out.Type().Elem())
460
+			elem.Elem().Set(reflect.ValueOf(resolved))
461
+			out.Set(elem)
462
+			good = true
463
+		}
464
+	}
465
+	if !good {
466
+		d.terror(n, tag, out)
467
+	}
468
+	return good
469
+}
470
+
471
+func settableValueOf(i interface{}) reflect.Value {
472
+	v := reflect.ValueOf(i)
473
+	sv := reflect.New(v.Type()).Elem()
474
+	sv.Set(v)
475
+	return sv
476
+}
477
+
478
+func (d *decoder) sequence(n *node, out reflect.Value) (good bool) {
479
+	l := len(n.children)
480
+
481
+	var iface reflect.Value
482
+	switch out.Kind() {
483
+	case reflect.Slice:
484
+		out.Set(reflect.MakeSlice(out.Type(), l, l))
485
+	case reflect.Interface:
486
+		// No type hints. Will have to use a generic sequence.
487
+		iface = out
488
+		out = settableValueOf(make([]interface{}, l))
489
+	default:
490
+		d.terror(n, yaml_SEQ_TAG, out)
491
+		return false
492
+	}
493
+	et := out.Type().Elem()
494
+
495
+	j := 0
496
+	for i := 0; i < l; i++ {
497
+		e := reflect.New(et).Elem()
498
+		if ok := d.unmarshal(n.children[i], e); ok {
499
+			out.Index(j).Set(e)
500
+			j++
501
+		}
502
+	}
503
+	out.Set(out.Slice(0, j))
504
+	if iface.IsValid() {
505
+		iface.Set(out)
506
+	}
507
+	return true
508
+}
509
+
510
+func (d *decoder) mapping(n *node, out reflect.Value) (good bool) {
511
+	switch out.Kind() {
512
+	case reflect.Struct:
513
+		return d.mappingStruct(n, out)
514
+	case reflect.Slice:
515
+		return d.mappingSlice(n, out)
516
+	case reflect.Map:
517
+		// okay
518
+	case reflect.Interface:
519
+		if d.mapType.Kind() == reflect.Map {
520
+			iface := out
521
+			out = reflect.MakeMap(d.mapType)
522
+			iface.Set(out)
523
+		} else {
524
+			slicev := reflect.New(d.mapType).Elem()
525
+			if !d.mappingSlice(n, slicev) {
526
+				return false
527
+			}
528
+			out.Set(slicev)
529
+			return true
530
+		}
531
+	default:
532
+		d.terror(n, yaml_MAP_TAG, out)
533
+		return false
534
+	}
535
+	outt := out.Type()
536
+	kt := outt.Key()
537
+	et := outt.Elem()
538
+
539
+	mapType := d.mapType
540
+	if outt.Key() == ifaceType && outt.Elem() == ifaceType {
541
+		d.mapType = outt
542
+	}
543
+
544
+	if out.IsNil() {
545
+		out.Set(reflect.MakeMap(outt))
546
+	}
547
+	l := len(n.children)
548
+	for i := 0; i < l; i += 2 {
549
+		if isMerge(n.children[i]) {
550
+			d.merge(n.children[i+1], out)
551
+			continue
552
+		}
553
+		k := reflect.New(kt).Elem()
554
+		if d.unmarshal(n.children[i], k) {
555
+			kkind := k.Kind()
556
+			if kkind == reflect.Interface {
557
+				kkind = k.Elem().Kind()
558
+			}
559
+			if kkind == reflect.Map || kkind == reflect.Slice {
560
+				failf("invalid map key: %#v", k.Interface())
561
+			}
562
+			e := reflect.New(et).Elem()
563
+			if d.unmarshal(n.children[i+1], e) {
564
+				out.SetMapIndex(k, e)
565
+			}
566
+		}
567
+	}
568
+	d.mapType = mapType
569
+	return true
570
+}
571
+
572
+func (d *decoder) mappingSlice(n *node, out reflect.Value) (good bool) {
573
+	outt := out.Type()
574
+	if outt.Elem() != mapItemType {
575
+		d.terror(n, yaml_MAP_TAG, out)
576
+		return false
577
+	}
578
+
579
+	mapType := d.mapType
580
+	d.mapType = outt
581
+
582
+	var slice []MapItem
583
+	var l = len(n.children)
584
+	for i := 0; i < l; i += 2 {
585
+		if isMerge(n.children[i]) {
586
+			d.merge(n.children[i+1], out)
587
+			continue
588
+		}
589
+		item := MapItem{}
590
+		k := reflect.ValueOf(&item.Key).Elem()
591
+		if d.unmarshal(n.children[i], k) {
592
+			v := reflect.ValueOf(&item.Value).Elem()
593
+			if d.unmarshal(n.children[i+1], v) {
594
+				slice = append(slice, item)
595
+			}
596
+		}
597
+	}
598
+	out.Set(reflect.ValueOf(slice))
599
+	d.mapType = mapType
600
+	return true
601
+}
602
+
603
+func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) {
604
+	sinfo, err := getStructInfo(out.Type())
605
+	if err != nil {
606
+		panic(err)
607
+	}
608
+	name := settableValueOf("")
609
+	l := len(n.children)
610
+
611
+	var inlineMap reflect.Value
612
+	var elemType reflect.Type
613
+	if sinfo.InlineMap != -1 {
614
+		inlineMap = out.Field(sinfo.InlineMap)
615
+		inlineMap.Set(reflect.New(inlineMap.Type()).Elem())
616
+		elemType = inlineMap.Type().Elem()
617
+	}
618
+
619
+	for i := 0; i < l; i += 2 {
620
+		ni := n.children[i]
621
+		if isMerge(ni) {
622
+			d.merge(n.children[i+1], out)
623
+			continue
624
+		}
625
+		if !d.unmarshal(ni, name) {
626
+			continue
627
+		}
628
+		if info, ok := sinfo.FieldsMap[name.String()]; ok {
629
+			var field reflect.Value
630
+			if info.Inline == nil {
631
+				field = out.Field(info.Num)
632
+			} else {
633
+				field = out.FieldByIndex(info.Inline)
634
+			}
635
+			d.unmarshal(n.children[i+1], field)
636
+		} else if sinfo.InlineMap != -1 {
637
+			if inlineMap.IsNil() {
638
+				inlineMap.Set(reflect.MakeMap(inlineMap.Type()))
639
+			}
640
+			value := reflect.New(elemType).Elem()
641
+			d.unmarshal(n.children[i+1], value)
642
+			inlineMap.SetMapIndex(name, value)
643
+		} else if d.strict {
644
+			d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in struct %s", n.line+1, name.String(), out.Type()))
645
+		}
646
+	}
647
+	return true
648
+}
649
+
650
+func failWantMap() {
651
+	failf("map merge requires map or sequence of maps as the value")
652
+}
653
+
654
+func (d *decoder) merge(n *node, out reflect.Value) {
655
+	switch n.kind {
656
+	case mappingNode:
657
+		d.unmarshal(n, out)
658
+	case aliasNode:
659
+		an, ok := d.doc.anchors[n.value]
660
+		if ok && an.kind != mappingNode {
661
+			failWantMap()
662
+		}
663
+		d.unmarshal(n, out)
664
+	case sequenceNode:
665
+		// Step backwards as earlier nodes take precedence.
666
+		for i := len(n.children) - 1; i >= 0; i-- {
667
+			ni := n.children[i]
668
+			if ni.kind == aliasNode {
669
+				an, ok := d.doc.anchors[ni.value]
670
+				if ok && an.kind != mappingNode {
671
+					failWantMap()
672
+				}
673
+			} else if ni.kind != mappingNode {
674
+				failWantMap()
675
+			}
676
+			d.unmarshal(ni, out)
677
+		}
678
+	default:
679
+		failWantMap()
680
+	}
681
+}
682
+
683
+func isMerge(n *node) bool {
684
+	return n.kind == scalarNode && n.value == "<<" && (n.implicit == true || n.tag == yaml_MERGE_TAG)
685
+}

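The decoder above honours YAML anchors, aliases and merge keys ("<<"): merge() walks a merged sequence backwards and mapping() applies explicit keys after merged ones, so explicitly written keys take precedence over merged values. A minimal sketch of that behaviour as seen from application code; the document, struct and values are illustrative and not part of this commit.

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// Illustrative document: "defaults" is anchored, then merged into "bedroom"
// with "<<"; the explicit threshold on "bedroom" wins over the merged one.
const doc = `
defaults: &defaults
  action: http
  threshold: 50
bedroom:
  <<: *defaults
  threshold: 30
`

type alert struct {
	Action    string `yaml:"action"`
	Threshold int    `yaml:"threshold"`
}

func main() {
	var cfg map[string]alert
	if err := yaml.Unmarshal([]byte(doc), &cfg); err != nil {
		panic(err)
	}
	fmt.Println(cfg["bedroom"].Action, cfg["bedroom"].Threshold) // prints: http 30
}
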
+ 1032
- 0
vendor/src/gopkg.in/yaml.v2/decode_test.go View File
File diff suppressed because it is too large


+ 1684
- 0
vendor/src/gopkg.in/yaml.v2/emitterc.go View File
File diff suppressed because it is too large


+ 306
- 0
vendor/src/gopkg.in/yaml.v2/encode.go View File

@@ -0,0 +1,306 @@
1
+package yaml
2
+
3
+import (
4
+	"encoding"
5
+	"fmt"
6
+	"reflect"
7
+	"regexp"
8
+	"sort"
9
+	"strconv"
10
+	"strings"
11
+	"time"
12
+)
13
+
14
+type encoder struct {
15
+	emitter yaml_emitter_t
16
+	event   yaml_event_t
17
+	out     []byte
18
+	flow    bool
19
+}
20
+
21
+func newEncoder() (e *encoder) {
22
+	e = &encoder{}
23
+	e.must(yaml_emitter_initialize(&e.emitter))
24
+	yaml_emitter_set_output_string(&e.emitter, &e.out)
25
+	yaml_emitter_set_unicode(&e.emitter, true)
26
+	e.must(yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING))
27
+	e.emit()
28
+	e.must(yaml_document_start_event_initialize(&e.event, nil, nil, true))
29
+	e.emit()
30
+	return e
31
+}
32
+
33
+func (e *encoder) finish() {
34
+	e.must(yaml_document_end_event_initialize(&e.event, true))
35
+	e.emit()
36
+	e.emitter.open_ended = false
37
+	e.must(yaml_stream_end_event_initialize(&e.event))
38
+	e.emit()
39
+}
40
+
41
+func (e *encoder) destroy() {
42
+	yaml_emitter_delete(&e.emitter)
43
+}
44
+
45
+func (e *encoder) emit() {
46
+	// This will internally delete the e.event value.
47
+	if !yaml_emitter_emit(&e.emitter, &e.event) && e.event.typ != yaml_DOCUMENT_END_EVENT && e.event.typ != yaml_STREAM_END_EVENT {
48
+		e.must(false)
49
+	}
50
+}
51
+
52
+func (e *encoder) must(ok bool) {
53
+	if !ok {
54
+		msg := e.emitter.problem
55
+		if msg == "" {
56
+			msg = "unknown problem generating YAML content"
57
+		}
58
+		failf("%s", msg)
59
+	}
60
+}
61
+
62
+func (e *encoder) marshal(tag string, in reflect.Value) {
63
+	if !in.IsValid() {
64
+		e.nilv()
65
+		return
66
+	}
67
+	iface := in.Interface()
68
+	if m, ok := iface.(Marshaler); ok {
69
+		v, err := m.MarshalYAML()
70
+		if err != nil {
71
+			fail(err)
72
+		}
73
+		if v == nil {
74
+			e.nilv()
75
+			return
76
+		}
77
+		in = reflect.ValueOf(v)
78
+	} else if m, ok := iface.(encoding.TextMarshaler); ok {
79
+		text, err := m.MarshalText()
80
+		if err != nil {
81
+			fail(err)
82
+		}
83
+		in = reflect.ValueOf(string(text))
84
+	}
85
+	switch in.Kind() {
86
+	case reflect.Interface:
87
+		if in.IsNil() {
88
+			e.nilv()
89
+		} else {
90
+			e.marshal(tag, in.Elem())
91
+		}
92
+	case reflect.Map:
93
+		e.mapv(tag, in)
94
+	case reflect.Ptr:
95
+		if in.IsNil() {
96
+			e.nilv()
97
+		} else {
98
+			e.marshal(tag, in.Elem())
99
+		}
100
+	case reflect.Struct:
101
+		e.structv(tag, in)
102
+	case reflect.Slice:
103
+		if in.Type().Elem() == mapItemType {
104
+			e.itemsv(tag, in)
105
+		} else {
106
+			e.slicev(tag, in)
107
+		}
108
+	case reflect.String:
109
+		e.stringv(tag, in)
110
+	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
111
+		if in.Type() == durationType {
112
+			e.stringv(tag, reflect.ValueOf(iface.(time.Duration).String()))
113
+		} else {
114
+			e.intv(tag, in)
115
+		}
116
+	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
117
+		e.uintv(tag, in)
118
+	case reflect.Float32, reflect.Float64:
119
+		e.floatv(tag, in)
120
+	case reflect.Bool:
121
+		e.boolv(tag, in)
122
+	default:
123
+		panic("cannot marshal type: " + in.Type().String())
124
+	}
125
+}
126
+
127
+func (e *encoder) mapv(tag string, in reflect.Value) {
128
+	e.mappingv(tag, func() {
129
+		keys := keyList(in.MapKeys())
130
+		sort.Sort(keys)
131
+		for _, k := range keys {
132
+			e.marshal("", k)
133
+			e.marshal("", in.MapIndex(k))
134
+		}
135
+	})
136
+}
137
+
138
+func (e *encoder) itemsv(tag string, in reflect.Value) {
139
+	e.mappingv(tag, func() {
140
+		slice := in.Convert(reflect.TypeOf([]MapItem{})).Interface().([]MapItem)
141
+		for _, item := range slice {
142
+			e.marshal("", reflect.ValueOf(item.Key))
143
+			e.marshal("", reflect.ValueOf(item.Value))
144
+		}
145
+	})
146
+}
147
+
148
+func (e *encoder) structv(tag string, in reflect.Value) {
149
+	sinfo, err := getStructInfo(in.Type())
150
+	if err != nil {
151
+		panic(err)
152
+	}
153
+	e.mappingv(tag, func() {
154
+		for _, info := range sinfo.FieldsList {
155
+			var value reflect.Value
156
+			if info.Inline == nil {
157
+				value = in.Field(info.Num)
158
+			} else {
159
+				value = in.FieldByIndex(info.Inline)
160
+			}
161
+			if info.OmitEmpty && isZero(value) {
162
+				continue
163
+			}
164
+			e.marshal("", reflect.ValueOf(info.Key))
165
+			e.flow = info.Flow
166
+			e.marshal("", value)
167
+		}
168
+		if sinfo.InlineMap >= 0 {
169
+			m := in.Field(sinfo.InlineMap)
170
+			if m.Len() > 0 {
171
+				e.flow = false
172
+				keys := keyList(m.MapKeys())
173
+				sort.Sort(keys)
174
+				for _, k := range keys {
175
+					if _, found := sinfo.FieldsMap[k.String()]; found {
176
+						panic(fmt.Sprintf("Can't have key %q in inlined map; conflicts with struct field", k.String()))
177
+					}
178
+					e.marshal("", k)
179
+					e.flow = false
180
+					e.marshal("", m.MapIndex(k))
181
+				}
182
+			}
183
+		}
184
+	})
185
+}
186
+
187
+func (e *encoder) mappingv(tag string, f func()) {
188
+	implicit := tag == ""
189
+	style := yaml_BLOCK_MAPPING_STYLE
190
+	if e.flow {
191
+		e.flow = false
192
+		style = yaml_FLOW_MAPPING_STYLE
193
+	}
194
+	e.must(yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
195
+	e.emit()
196
+	f()
197
+	e.must(yaml_mapping_end_event_initialize(&e.event))
198
+	e.emit()
199
+}
200
+
201
+func (e *encoder) slicev(tag string, in reflect.Value) {
202
+	implicit := tag == ""
203
+	style := yaml_BLOCK_SEQUENCE_STYLE
204
+	if e.flow {
205
+		e.flow = false
206
+		style = yaml_FLOW_SEQUENCE_STYLE
207
+	}
208
+	e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
209
+	e.emit()
210
+	n := in.Len()
211
+	for i := 0; i < n; i++ {
212
+		e.marshal("", in.Index(i))
213
+	}
214
+	e.must(yaml_sequence_end_event_initialize(&e.event))
215
+	e.emit()
216
+}
217
+
218
+// isBase60 returns whether s is in base 60 notation as defined in YAML 1.1.
219
+//
220
+// The base 60 float notation in YAML 1.1 is a terrible idea and is unsupported
221
+// in YAML 1.2 and by this package, but these should be marshalled quoted for
222
+// the time being for compatibility with other parsers.
223
+func isBase60Float(s string) (result bool) {
224
+	// Fast path.
225
+	if s == "" {
226
+		return false
227
+	}
228
+	c := s[0]
229
+	if !(c == '+' || c == '-' || c >= '0' && c <= '9') || strings.IndexByte(s, ':') < 0 {
230
+		return false
231
+	}
232
+	// Do the full match.
233
+	return base60float.MatchString(s)
234
+}
235
+
236
+// From http://yaml.org/type/float.html, except the regular expression there
237
+// is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix.
238
+var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`)
239
+
240
+func (e *encoder) stringv(tag string, in reflect.Value) {
241
+	var style yaml_scalar_style_t
242
+	s := in.String()
243
+	rtag, rs := resolve("", s)
244
+	if rtag == yaml_BINARY_TAG {
245
+		if tag == "" || tag == yaml_STR_TAG {
246
+			tag = rtag
247
+			s = rs.(string)
248
+		} else if tag == yaml_BINARY_TAG {
249
+			failf("explicitly tagged !!binary data must be base64-encoded")
250
+		} else {
251
+			failf("cannot marshal invalid UTF-8 data as %s", shortTag(tag))
252
+		}
253
+	}
254
+	if tag == "" && (rtag != yaml_STR_TAG || isBase60Float(s)) {
255
+		style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
256
+	} else if strings.Contains(s, "\n") {
257
+		style = yaml_LITERAL_SCALAR_STYLE
258
+	} else {
259
+		style = yaml_PLAIN_SCALAR_STYLE
260
+	}
261
+	e.emitScalar(s, "", tag, style)
262
+}
263
+
264
+func (e *encoder) boolv(tag string, in reflect.Value) {
265
+	var s string
266
+	if in.Bool() {
267
+		s = "true"
268
+	} else {
269
+		s = "false"
270
+	}
271
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
272
+}
273
+
274
+func (e *encoder) intv(tag string, in reflect.Value) {
275
+	s := strconv.FormatInt(in.Int(), 10)
276
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
277
+}
278
+
279
+func (e *encoder) uintv(tag string, in reflect.Value) {
280
+	s := strconv.FormatUint(in.Uint(), 10)
281
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
282
+}
283
+
284
+func (e *encoder) floatv(tag string, in reflect.Value) {
285
+	// FIXME: Handle 64 bits here.
286
+	s := strconv.FormatFloat(float64(in.Float()), 'g', -1, 32)
287
+	switch s {
288
+	case "+Inf":
289
+		s = ".inf"
290
+	case "-Inf":
291
+		s = "-.inf"
292
+	case "NaN":
293
+		s = ".nan"
294
+	}
295
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
296
+}
297
+
298
+func (e *encoder) nilv() {
299
+	e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE)
300
+}
301
+
302
+func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) {
303
+	implicit := tag == ""
304
+	e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style))
305
+	e.emit()
306
+}

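On the encoding side, structv and mappingv implement the struct tags exercised by the tests that follow: ",omitempty" drops zero values, ",flow" switches a nested mapping to flow style, ",inline" hoists a map's keys to the enclosing level, and strings that look like YAML 1.1 base-60 numbers are double-quoted. A hedged sketch of those behaviours; the types, field names and values are invented for illustration.

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// Illustrative types showing the tag handling implemented by structv above.
type limits struct {
	Low  int `yaml:"low"`
	High int `yaml:"high,omitempty"` // omitted while zero
}

type script struct {
	Key    string            `yaml:"key"`
	Limits limits            `yaml:"limits,flow"` // emitted in flow style: {low: 1}
	Extra  map[string]string `yaml:",inline"`     // keys hoisted to the top level
}

func main() {
	out, err := yaml.Marshal(script{
		Key:    "temperature",
		Limits: limits{Low: 1},
		Extra:  map[string]string{"uptime": "1:30"}, // base-60-looking, so it gets quoted
	})
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
	// key: temperature
	// limits: {low: 1}
	// uptime: "1:30"
}
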
+ 501
- 0
vendor/src/gopkg.in/yaml.v2/encode_test.go View File

@@ -0,0 +1,501 @@
1
+package yaml_test
2
+
3
+import (
4
+	"fmt"
5
+	"math"
6
+	"strconv"
7
+	"strings"
8
+	"time"
9
+
10
+	. "gopkg.in/check.v1"
11
+	"gopkg.in/yaml.v2"
12
+	"net"
13
+	"os"
14
+)
15
+
16
+var marshalIntTest = 123
17
+
18
+var marshalTests = []struct {
19
+	value interface{}
20
+	data  string
21
+}{
22
+	{
23
+		nil,
24
+		"null\n",
25
+	}, {
26
+		&struct{}{},
27
+		"{}\n",
28
+	}, {
29
+		map[string]string{"v": "hi"},
30
+		"v: hi\n",
31
+	}, {
32
+		map[string]interface{}{"v": "hi"},
33
+		"v: hi\n",
34
+	}, {
35
+		map[string]string{"v": "true"},
36
+		"v: \"true\"\n",
37
+	}, {
38
+		map[string]string{"v": "false"},
39
+		"v: \"false\"\n",
40
+	}, {
41
+		map[string]interface{}{"v": true},
42
+		"v: true\n",
43
+	}, {
44
+		map[string]interface{}{"v": false},
45
+		"v: false\n",
46
+	}, {
47
+		map[string]interface{}{"v": 10},
48
+		"v: 10\n",
49
+	}, {
50
+		map[string]interface{}{"v": -10},
51
+		"v: -10\n",
52
+	}, {
53
+		map[string]uint{"v": 42},
54
+		"v: 42\n",
55
+	}, {
56
+		map[string]interface{}{"v": int64(4294967296)},
57
+		"v: 4294967296\n",
58
+	}, {
59
+		map[string]int64{"v": int64(4294967296)},
60
+		"v: 4294967296\n",
61
+	}, {
62
+		map[string]uint64{"v": 4294967296},
63
+		"v: 4294967296\n",
64
+	}, {
65
+		map[string]interface{}{"v": "10"},
66
+		"v: \"10\"\n",
67
+	}, {
68
+		map[string]interface{}{"v": 0.1},
69
+		"v: 0.1\n",
70
+	}, {
71
+		map[string]interface{}{"v": float64(0.1)},
72
+		"v: 0.1\n",
73
+	}, {
74
+		map[string]interface{}{"v": -0.1},
75
+		"v: -0.1\n",
76
+	}, {
77
+		map[string]interface{}{"v": math.Inf(+1)},
78
+		"v: .inf\n",
79
+	}, {
80
+		map[string]interface{}{"v": math.Inf(-1)},
81
+		"v: -.inf\n",
82
+	}, {
83
+		map[string]interface{}{"v": math.NaN()},
84
+		"v: .nan\n",
85
+	}, {
86
+		map[string]interface{}{"v": nil},
87
+		"v: null\n",
88
+	}, {
89
+		map[string]interface{}{"v": ""},
90
+		"v: \"\"\n",
91
+	}, {
92
+		map[string][]string{"v": []string{"A", "B"}},
93
+		"v:\n- A\n- B\n",
94
+	}, {
95
+		map[string][]string{"v": []string{"A", "B\nC"}},
96
+		"v:\n- A\n- |-\n  B\n  C\n",
97
+	}, {
98
+		map[string][]interface{}{"v": []interface{}{"A", 1, map[string][]int{"B": []int{2, 3}}}},
99
+		"v:\n- A\n- 1\n- B:\n  - 2\n  - 3\n",
100
+	}, {
101
+		map[string]interface{}{"a": map[interface{}]interface{}{"b": "c"}},
102
+		"a:\n  b: c\n",
103
+	}, {
104
+		map[string]interface{}{"a": "-"},
105
+		"a: '-'\n",
106
+	},
107
+
108
+	// Simple values.
109
+	{
110
+		&marshalIntTest,
111
+		"123\n",
112
+	},
113
+
114
+	// Structures
115
+	{
116
+		&struct{ Hello string }{"world"},
117
+		"hello: world\n",
118
+	}, {
119
+		&struct {
120
+			A struct {
121
+				B string
122
+			}
123
+		}{struct{ B string }{"c"}},
124
+		"a:\n  b: c\n",
125
+	}, {
126
+		&struct {
127
+			A *struct {
128
+				B string
129
+			}
130
+		}{&struct{ B string }{"c"}},
131
+		"a:\n  b: c\n",
132
+	}, {
133
+		&struct {
134
+			A *struct {
135
+				B string
136
+			}
137
+		}{},
138
+		"a: null\n",
139
+	}, {
140
+		&struct{ A int }{1},
141
+		"a: 1\n",
142
+	}, {
143
+		&struct{ A []int }{[]int{1, 2}},
144
+		"a:\n- 1\n- 2\n",
145
+	}, {
146
+		&struct {
147
+			B int "a"
148
+		}{1},
149
+		"a: 1\n",
150
+	}, {
151
+		&struct{ A bool }{true},
152
+		"a: true\n",
153
+	},
154
+
155
+	// Conditional flag
156
+	{
157
+		&struct {
158
+			A int "a,omitempty"
159
+			B int "b,omitempty"
160
+		}{1, 0},
161
+		"a: 1\n",
162
+	}, {
163
+		&struct {
164
+			A int "a,omitempty"
165
+			B int "b,omitempty"
166
+		}{0, 0},
167
+		"{}\n",
168
+	}, {
169
+		&struct {
170
+			A *struct{ X, y int } "a,omitempty,flow"
171
+		}{&struct{ X, y int }{1, 2}},
172
+		"a: {x: 1}\n",
173
+	}, {
174
+		&struct {
175
+			A *struct{ X, y int } "a,omitempty,flow"
176
+		}{nil},
177
+		"{}\n",
178
+	}, {
179
+		&struct {
180
+			A *struct{ X, y int } "a,omitempty,flow"
181
+		}{&struct{ X, y int }{}},
182
+		"a: {x: 0}\n",
183
+	}, {
184
+		&struct {
185
+			A struct{ X, y int } "a,omitempty,flow"
186
+		}{struct{ X, y int }{1, 2}},
187
+		"a: {x: 1}\n",
188
+	}, {
189
+		&struct {
190
+			A struct{ X, y int } "a,omitempty,flow"
191
+		}{struct{ X, y int }{0, 1}},
192
+		"{}\n",
193
+	}, {
194
+		&struct {
195
+			A float64 "a,omitempty"
196
+			B float64 "b,omitempty"
197
+		}{1, 0},
198
+		"a: 1\n",
199
+	},
200
+
201
+	// Flow flag
202
+	{
203
+		&struct {
204
+			A []int "a,flow"
205
+		}{[]int{1, 2}},
206
+		"a: [1, 2]\n",
207
+	}, {
208
+		&struct {
209
+			A map[string]string "a,flow"
210
+		}{map[string]string{"b": "c", "d": "e"}},
211
+		"a: {b: c, d: e}\n",
212
+	}, {
213
+		&struct {
214
+			A struct {
215
+				B, D string
216
+			} "a,flow"
217
+		}{struct{ B, D string }{"c", "e"}},
218
+		"a: {b: c, d: e}\n",
219
+	},
220
+
221
+	// Unexported field
222
+	{
223
+		&struct {
224
+			u int
225
+			A int
226
+		}{0, 1},
227
+		"a: 1\n",
228
+	},
229
+
230
+	// Ignored field
231
+	{
232
+		&struct {
233
+			A int
234
+			B int "-"
235
+		}{1, 2},
236
+		"a: 1\n",
237
+	},
238
+
239
+	// Struct inlining
240
+	{
241
+		&struct {
242
+			A int
243
+			C inlineB `yaml:",inline"`
244
+		}{1, inlineB{2, inlineC{3}}},
245
+		"a: 1\nb: 2\nc: 3\n",
246
+	},
247
+
248
+	// Map inlining
249
+	{
250
+		&struct {
251
+			A int
252
+			C map[string]int `yaml:",inline"`
253
+		}{1, map[string]int{"b": 2, "c": 3}},
254
+		"a: 1\nb: 2\nc: 3\n",
255
+	},
256
+
257
+	// Duration
258
+	{
259
+		map[string]time.Duration{"a": 3 * time.Second},
260
+		"a: 3s\n",
261
+	},
262
+
263
+	// Issue #24: bug in map merging logic.
264
+	{
265
+		map[string]string{"a": "<foo>"},
266
+		"a: <foo>\n",
267
+	},
268
+
269
+	// Issue #34: marshal unsupported base 60 floats quoted for compatibility
270
+	// with old YAML 1.1 parsers.
271
+	{
272
+		map[string]string{"a": "1:1"},
273
+		"a: \"1:1\"\n",
274
+	},
275
+
276
+	// Binary data.
277
+	{
278
+		map[string]string{"a": "\x00"},
279
+		"a: \"\\0\"\n",
280
+	}, {
281
+		map[string]string{"a": "\x80\x81\x82"},
282
+		"a: !!binary gIGC\n",
283
+	}, {
284
+		map[string]string{"a": strings.Repeat("\x90", 54)},
285
+		"a: !!binary |\n  " + strings.Repeat("kJCQ", 17) + "kJ\n  CQ\n",
286
+	},
287
+
288
+	// Ordered maps.
289
+	{
290
+		&yaml.MapSlice{{"b", 2}, {"a", 1}, {"d", 4}, {"c", 3}, {"sub", yaml.MapSlice{{"e", 5}}}},
291
+		"b: 2\na: 1\nd: 4\nc: 3\nsub:\n  e: 5\n",
292
+	},
293
+
294
+	// Encode unicode as utf-8 rather than in escaped form.
295
+	{
296
+		map[string]string{"a": "你好"},
297
+		"a: 你好\n",
298
+	},
299
+
300
+	// Support encoding.TextMarshaler.
301
+	{
302
+		map[string]net.IP{"a": net.IPv4(1, 2, 3, 4)},
303
+		"a: 1.2.3.4\n",
304
+	},
305
+	{
306
+		map[string]time.Time{"a": time.Unix(1424801979, 0)},
307
+		"a: 2015-02-24T18:19:39Z\n",
308
+	},
309
+
310
+	// Ensure strings containing ": " are quoted (reported as PR #43, but not reproducible).
311
+	{
312
+		map[string]string{"a": "b: c"},
313
+		"a: 'b: c'\n",
314
+	},
315
+
316
+	// Strings containing a hash mark ('#') should be quoted
317
+	{
318
+		map[string]string{"a": "Hello #comment"},
319
+		"a: 'Hello #comment'\n",
320
+	},
321
+	{
322
+		map[string]string{"a": "你好 #comment"},
323
+		"a: '你好 #comment'\n",
324
+	},
325
+}
326
+
327
+func (s *S) TestMarshal(c *C) {
328
+	defer os.Setenv("TZ", os.Getenv("TZ"))
329
+	os.Setenv("TZ", "UTC")
330
+	for _, item := range marshalTests {
331
+		data, err := yaml.Marshal(item.value)
332
+		c.Assert(err, IsNil)
333
+		c.Assert(string(data), Equals, item.data)
334
+	}
335
+}
336
+
337
+var marshalErrorTests = []struct {
338
+	value interface{}
339
+	error string
340
+	panic string
341
+}{{
342
+	value: &struct {
343
+		B       int
344
+		inlineB ",inline"
345
+	}{1, inlineB{2, inlineC{3}}},
346
+	panic: `Duplicated key 'b' in struct struct \{ B int; .*`,
347
+}, {
348
+	value: &struct {
349
+		A int
350
+		B map[string]int ",inline"
351
+	}{1, map[string]int{"a": 2}},
352
+	panic: `Can't have key "a" in inlined map; conflicts with struct field`,
353
+}}
354
+
355
+func (s *S) TestMarshalErrors(c *C) {
356
+	for _, item := range marshalErrorTests {
357
+		if item.panic != "" {
358
+			c.Assert(func() { yaml.Marshal(item.value) }, PanicMatches, item.panic)
359
+		} else {
360
+			_, err := yaml.Marshal(item.value)
361
+			c.Assert(err, ErrorMatches, item.error)
362
+		}
363
+	}
364
+}
365
+
366
+func (s *S) TestMarshalTypeCache(c *C) {
367
+	var data []byte
368
+	var err error
369
+	func() {
370
+		type T struct{ A int }
371
+		data, err = yaml.Marshal(&T{})
372
+		c.Assert(err, IsNil)
373
+	}()
374
+	func() {
375
+		type T struct{ B int }
376
+		data, err = yaml.Marshal(&T{})
377
+		c.Assert(err, IsNil)
378
+	}()
379
+	c.Assert(string(data), Equals, "b: 0\n")
380
+}
381
+
382
+var marshalerTests = []struct {
383
+	data  string
384
+	value interface{}
385
+}{
386
+	{"_:\n  hi: there\n", map[interface{}]interface{}{"hi": "there"}},
387
+	{"_:\n- 1\n- A\n", []interface{}{1, "A"}},
388
+	{"_: 10\n", 10},
389
+	{"_: null\n", nil},
390
+	{"_: BAR!\n", "BAR!"},
391
+}
392
+
393
+type marshalerType struct {
394
+	value interface{}
395
+}
396
+
397
+func (o marshalerType) MarshalText() ([]byte, error) {
398
+	panic("MarshalText called on type with MarshalYAML")
399
+}
400
+
401
+func (o marshalerType) MarshalYAML() (interface{}, error) {
402
+	return o.value, nil
403
+}
404
+
405
+type marshalerValue struct {
406
+	Field marshalerType "_"
407
+}
408
+
409
+func (s *S) TestMarshaler(c *C) {
410
+	for _, item := range marshalerTests {
411
+		obj := &marshalerValue{}
412
+		obj.Field.value = item.value
413
+		data, err := yaml.Marshal(obj)
414
+		c.Assert(err, IsNil)
415
+		c.Assert(string(data), Equals, string(item.data))
416
+	}
417
+}
418
+
419
+func (s *S) TestMarshalerWholeDocument(c *C) {
420
+	obj := &marshalerType{}
421
+	obj.value = map[string]string{"hello": "world!"}
422
+	data, err := yaml.Marshal(obj)
423
+	c.Assert(err, IsNil)