gaoqiong / yaml-cpp · Commits

Commit 3355bbb3
Authored Mar 22, 2014 by Jesse Beder
Browse files

Merge clang-format from core

Parents: 5b889311, 9b4db068

Changes: 72 · Showing 20 changed files with 4153 additions and 4224 deletions (+4153, -4224)
src/scanner.h              +125   -115
src/scanscalar.cpp         +211   -207
src/scanscalar.h           +60    -33
src/scantag.cpp            +72    -77
src/scantag.h              +8     -9
src/scantoken.cpp          +422   -429
src/setting.h              +87    -93
src/simplekey.cpp          +121   -132
src/singledocparser.cpp    +390   -381
src/singledocparser.h      +48    -49
src/stream.cpp             +432   -435
src/stream.h               +59    -56
src/streamcharsource.h     +37    -37
src/stringsource.h         +38    -37
src/tag.cpp                +40    -44
src/tag.h                  +22    -17
src/token.h                +57    -68
test/core/parsertests.cpp  +69    -70
test/core/spectests.cpp    +1850  -1931
test/emittertests.h        +5     -4
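The diffs below are the mechanical result of running clang-format over the listed sources; only whitespace and line-breaking change. The commit does not record the exact invocation used, so the following is an illustration only (hypothetical paths, assuming a .clang-format file at the repository root):

  # reformat sources and headers in place (illustrative, not taken from the commit)
  clang-format -i -style=file src/*.cpp src/*.h test/core/*.cpp test/*.h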
src/scanner.h @ 3355bbb3

#ifndef SCANNER_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define SCANNER_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) ||                                            \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include <ios>
#include <string>
#include <queue>

...
@@ -16,118 +17,127 @@

#include "stream.h"
#include "token.h"

namespace YAML {
class Node;
class RegEx;

class Scanner {
 public:
  Scanner(std::istream& in);
  ~Scanner();

  // token queue management (hopefully this looks kinda stl-ish)
  bool empty();
  void pop();
  Token& peek();
  Mark mark() const;

 private:
  struct IndentMarker {
    enum INDENT_TYPE { MAP, SEQ, NONE };
    enum STATUS { VALID, INVALID, UNKNOWN };
    IndentMarker(int column_, INDENT_TYPE type_)
        : column(column_), type(type_), status(VALID), pStartToken(0) {}

    int column;
    INDENT_TYPE type;
    STATUS status;
    Token* pStartToken;
  };

  enum FLOW_MARKER { FLOW_MAP, FLOW_SEQ };

 private:
  // scanning
  void EnsureTokensInQueue();
  void ScanNextToken();
  void ScanToNextToken();
  void StartStream();
  void EndStream();
  Token* PushToken(Token::TYPE type);

  bool InFlowContext() const { return !m_flows.empty(); }
  bool InBlockContext() const { return m_flows.empty(); }
  int GetFlowLevel() const { return m_flows.size(); }

  Token::TYPE GetStartTokenFor(IndentMarker::INDENT_TYPE type) const;
  IndentMarker* PushIndentTo(int column, IndentMarker::INDENT_TYPE type);
  void PopIndentToHere();
  void PopAllIndents();
  void PopIndent();
  int GetTopIndent() const;

  // checking input
  bool CanInsertPotentialSimpleKey() const;
  bool ExistsActiveSimpleKey() const;
  void InsertPotentialSimpleKey();
  void InvalidateSimpleKey();
  bool VerifySimpleKey();
  void PopAllSimpleKeys();

  void ThrowParserException(const std::string& msg) const;

  bool IsWhitespaceToBeEaten(char ch);
  const RegEx& GetValueRegex() const;

  struct SimpleKey {
    SimpleKey(const Mark& mark_, int flowLevel_);

    void Validate();
    void Invalidate();

    Mark mark;
    int flowLevel;
    IndentMarker* pIndent;
    Token *pMapStart, *pKey;
  };

  // and the tokens
  void ScanDirective();
  void ScanDocStart();
  void ScanDocEnd();
  void ScanBlockSeqStart();
  void ScanBlockMapSTart();
  void ScanBlockEnd();
  void ScanBlockEntry();
  void ScanFlowStart();
  void ScanFlowEnd();
  void ScanFlowEntry();
  void ScanKey();
  void ScanValue();
  void ScanAnchorOrAlias();
  void ScanTag();
  void ScanPlainScalar();
  void ScanQuotedScalar();
  void ScanBlockScalar();

 private:
  // the stream
  Stream INPUT;

  // the output (tokens)
  std::queue<Token> m_tokens;

  // state info
  bool m_startedStream, m_endedStream;
  bool m_simpleKeyAllowed;
  bool m_canBeJSONFlow;
  std::stack<SimpleKey> m_simpleKeys;
  std::stack<IndentMarker*> m_indents;
  ptr_vector<IndentMarker> m_indentRefs;  // for "garbage collection"
  std::stack<FLOW_MARKER> m_flows;
};
}

#endif  // SCANNER_H_62B23520_7C8E_11DE_8A39_0800200C9A66
src/scanscalar.cpp @ 3355bbb3

...
@@ -4,211 +4,215 @@

#include "yaml-cpp/exceptions.h"
#include "token.h"

namespace YAML {
// ScanScalar
// . This is where the scalar magic happens.
//
// . We do the scanning in three phases:
//   1. Scan until newline
//   2. Eat newline
//   3. Scan leading blanks.
//
// . Depending on the parameters given, we store or stop
//   and different places in the above flow.
std::string ScanScalar(Stream& INPUT, ScanScalarParams& params) {
  bool foundNonEmptyLine = false;
  bool pastOpeningBreak = (params.fold == FOLD_FLOW);
  bool emptyLine = false, moreIndented = false;
  int foldedNewlineCount = 0;
  bool foldedNewlineStartedMoreIndented = false;
  std::size_t lastEscapedChar = std::string::npos;
  std::string scalar;
  params.leadingSpaces = false;

  while (INPUT) {
    // ********************************
    // Phase #1: scan until line ending

    std::size_t lastNonWhitespaceChar = scalar.size();
    bool escapedNewline = false;
    while (!params.end.Matches(INPUT) && !Exp::Break().Matches(INPUT)) {
      if (!INPUT)
        break;

      // document indicator?
      if (INPUT.column() == 0 && Exp::DocIndicator().Matches(INPUT)) {
        if (params.onDocIndicator == BREAK)
          break;
        else if (params.onDocIndicator == THROW)
          throw ParserException(INPUT.mark(), ErrorMsg::DOC_IN_SCALAR);
      }

      foundNonEmptyLine = true;
      pastOpeningBreak = true;

      // escaped newline? (only if we're escaping on slash)
      if (params.escape == '\\' && Exp::EscBreak().Matches(INPUT)) {
        // eat escape character and get out (but preserve trailing whitespace!)
        INPUT.get();
        lastNonWhitespaceChar = scalar.size();
        lastEscapedChar = scalar.size();
        escapedNewline = true;
        break;
      }

      // escape this?
      if (INPUT.peek() == params.escape) {
        scalar += Exp::Escape(INPUT);
        lastNonWhitespaceChar = scalar.size();
        lastEscapedChar = scalar.size();
        continue;
      }

      // otherwise, just add the damn character
      char ch = INPUT.get();
      scalar += ch;
      if (ch != ' ' && ch != '\t')
        lastNonWhitespaceChar = scalar.size();
    }

    // eof? if we're looking to eat something, then we throw
    if (!INPUT) {
      if (params.eatEnd)
        throw ParserException(INPUT.mark(), ErrorMsg::EOF_IN_SCALAR);
      break;
    }

    // doc indicator?
    if (params.onDocIndicator == BREAK && INPUT.column() == 0 &&
        Exp::DocIndicator().Matches(INPUT))
      break;

    // are we done via character match?
    int n = params.end.Match(INPUT);
    if (n >= 0) {
      if (params.eatEnd)
        INPUT.eat(n);
      break;
    }

    // do we remove trailing whitespace?
    if (params.fold == FOLD_FLOW)
      scalar.erase(lastNonWhitespaceChar);

    // ********************************
    // Phase #2: eat line ending
    n = Exp::Break().Match(INPUT);
    INPUT.eat(n);

    // ********************************
    // Phase #3: scan initial spaces

    // first the required indentation
    while (INPUT.peek() == ' ' &&
           (INPUT.column() < params.indent ||
            (params.detectIndent && !foundNonEmptyLine)))
      INPUT.eat(1);

    // update indent if we're auto-detecting
    if (params.detectIndent && !foundNonEmptyLine)
      params.indent = std::max(params.indent, INPUT.column());

    // and then the rest of the whitespace
    while (Exp::Blank().Matches(INPUT)) {
      // we check for tabs that masquerade as indentation
      if (INPUT.peek() == '\t' && INPUT.column() < params.indent &&
          params.onTabInIndentation == THROW)
        throw ParserException(INPUT.mark(), ErrorMsg::TAB_IN_INDENTATION);

      if (!params.eatLeadingWhitespace)
        break;

      INPUT.eat(1);
    }

    // was this an empty line?
    bool nextEmptyLine = Exp::Break().Matches(INPUT);
    bool nextMoreIndented = Exp::Blank().Matches(INPUT);
    if (params.fold == FOLD_BLOCK && foldedNewlineCount == 0 && nextEmptyLine)
      foldedNewlineStartedMoreIndented = moreIndented;

    // for block scalars, we always start with a newline, so we should ignore it
    // (not fold or keep)
    if (pastOpeningBreak) {
      switch (params.fold) {
        case DONT_FOLD:
          scalar += "\n";
          break;
        case FOLD_BLOCK:
          if (!emptyLine && !nextEmptyLine && !moreIndented &&
              !nextMoreIndented && INPUT.column() >= params.indent)
            scalar += " ";
          else if (nextEmptyLine)
            foldedNewlineCount++;
          else
            scalar += "\n";

          if (!nextEmptyLine && foldedNewlineCount > 0) {
            scalar += std::string(foldedNewlineCount - 1, '\n');
            if (foldedNewlineStartedMoreIndented ||
                nextMoreIndented | !foundNonEmptyLine)
              scalar += "\n";
            foldedNewlineCount = 0;
          }
          break;
        case FOLD_FLOW:
          if (nextEmptyLine)
            scalar += "\n";
          else if (!emptyLine && !nextEmptyLine && !escapedNewline)
            scalar += " ";
          break;
      }
    }

    emptyLine = nextEmptyLine;
    moreIndented = nextMoreIndented;
    pastOpeningBreak = true;

    // are we done via indentation?
    if (!emptyLine && INPUT.column() < params.indent) {
      params.leadingSpaces = true;
      break;
    }
  }

  // post-processing
  if (params.trimTrailingSpaces) {
    std::size_t pos = scalar.find_last_not_of(' ');
    if (lastEscapedChar != std::string::npos) {
      if (pos < lastEscapedChar || pos == std::string::npos)
        pos = lastEscapedChar;
    }
    if (pos < scalar.size())
      scalar.erase(pos + 1);
  }

  switch (params.chomp) {
    case CLIP: {
      std::size_t pos = scalar.find_last_not_of('\n');
      if (lastEscapedChar != std::string::npos) {
        if (pos < lastEscapedChar || pos == std::string::npos)
          pos = lastEscapedChar;
      }
      if (pos == std::string::npos)
        scalar.erase();
      else if (pos + 1 < scalar.size())
        scalar.erase(pos + 2);
    } break;
    case STRIP: {
      std::size_t pos = scalar.find_last_not_of('\n');
      if (lastEscapedChar != std::string::npos) {
        if (pos < lastEscapedChar || pos == std::string::npos)
          pos = lastEscapedChar;
      }
      if (pos == std::string::npos)
        scalar.erase();
      else if (pos < scalar.size())
        scalar.erase(pos + 1);
    } break;
    default:
      break;
  }

  return scalar;
}
}
src/scanscalar.h @ 3355bbb3

#ifndef SCANSCALAR_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define SCANSCALAR_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) ||                                            \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include <string>

#include "regex.h"
#include "stream.h"

namespace YAML {
enum CHOMP { STRIP = -1, CLIP, KEEP };
enum ACTION { NONE, BREAK, THROW };
enum FOLD { DONT_FOLD, FOLD_BLOCK, FOLD_FLOW };

struct ScanScalarParams {
  ScanScalarParams()
      : eatEnd(false),
        indent(0),
        detectIndent(false),
        eatLeadingWhitespace(0),
        escape(0),
        fold(DONT_FOLD),
        trimTrailingSpaces(0),
        chomp(CLIP),
        onDocIndicator(NONE),
        onTabInIndentation(NONE),
        leadingSpaces(false) {}

  // input:
  RegEx end;          // what condition ends this scalar?
  bool eatEnd;        // should we eat that condition when we see it?
  int indent;         // what level of indentation should be eaten and ignored?
  bool detectIndent;  // should we try to autodetect the indent?
  bool eatLeadingWhitespace;  // should we continue eating this delicious
                              // indentation after 'indent' spaces?
  char escape;  // what character do we escape on (i.e., slash or single quote)
                // (0 for none)
  FOLD fold;    // how do we fold line ends?
  bool trimTrailingSpaces;  // do we remove all trailing spaces (at the very
                            // end)
  CHOMP chomp;  // do we strip, clip, or keep trailing newlines (at the very
                // end)
                // Note: strip means kill all, clip means keep at most one,
                // keep means keep all
  ACTION onDocIndicator;      // what do we do if we see a document indicator?
  ACTION onTabInIndentation;  // what do we do if we see a tab where we should
                              // be seeing indentation spaces

  // output:
  bool leadingSpaces;
};

std::string ScanScalar(Stream& INPUT, ScanScalarParams& info);
}

#endif  // SCANSCALAR_H_62B23520_7C8E_11DE_8A39_0800200C9A66
src/scantag.cpp @ 3355bbb3

...
@@ -3,82 +3,77 @@

#include "exp.h"
#include "yaml-cpp/exceptions.h"

namespace YAML {
const std::string ScanVerbatimTag(Stream& INPUT) {
  std::string tag;

  // eat the start character
  INPUT.get();

  while (INPUT) {
    if (INPUT.peek() == Keys::VerbatimTagEnd) {
      // eat the end character
      INPUT.get();
      return tag;
    }

    int n = Exp::URI().Match(INPUT);
    if (n <= 0)
      break;

    tag += INPUT.get(n);
  }

  throw ParserException(INPUT.mark(), ErrorMsg::END_OF_VERBATIM_TAG);
}

const std::string ScanTagHandle(Stream& INPUT, bool& canBeHandle) {
  std::string tag;
  canBeHandle = true;
  Mark firstNonWordChar;

  while (INPUT) {
    if (INPUT.peek() == Keys::Tag) {
      if (!canBeHandle)
        throw ParserException(firstNonWordChar, ErrorMsg::CHAR_IN_TAG_HANDLE);
      break;
    }

    int n = 0;
    if (canBeHandle) {
      n = Exp::Word().Match(INPUT);
      if (n <= 0) {
        canBeHandle = false;
        firstNonWordChar = INPUT.mark();
      }
    }

    if (!canBeHandle)
      n = Exp::Tag().Match(INPUT);

    if (n <= 0)
      break;

    tag += INPUT.get(n);
  }

  return tag;
}

const std::string ScanTagSuffix(Stream& INPUT) {
  std::string tag;

  while (INPUT) {
    int n = Exp::Tag().Match(INPUT);
    if (n <= 0)
      break;

    tag += INPUT.get(n);
  }

  if (tag.empty())
    throw ParserException(INPUT.mark(), ErrorMsg::TAG_WITH_NO_SUFFIX);

  return tag;
}
}
src/scantag.h @ 3355bbb3

#ifndef SCANTAG_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define SCANTAG_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) ||                                            \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include <string>
#include "stream.h"

namespace YAML {
const std::string ScanVerbatimTag(Stream& INPUT);
const std::string ScanTagHandle(Stream& INPUT, bool& canBeHandle);
const std::string ScanTagSuffix(Stream& INPUT);
}

#endif  // SCANTAG_H_62B23520_7C8E_11DE_8A39_0800200C9A66
src/scantoken.cpp @ 3355bbb3

...
@@ -7,433 +7,426 @@

#include "tag.h"
#include <sstream>

namespace YAML {
///////////////////////////////////////////////////////////////////////
// Specialization for scanning specific tokens

// Directive
// . Note: no semantic checking is done here (that's for the parser to do)
void Scanner::ScanDirective() {
  std::string name;
  std::vector<std::string> params;

  // pop indents and simple keys
  PopAllIndents();
  PopAllSimpleKeys();

  m_simpleKeyAllowed = false;
  m_canBeJSONFlow = false;

  // store pos and eat indicator
  Token token(Token::DIRECTIVE, INPUT.mark());
  INPUT.eat(1);

  // read name
  while (INPUT && !Exp::BlankOrBreak().Matches(INPUT))
    token.value += INPUT.get();

  // read parameters
  while (1) {
    // first get rid of whitespace
    while (Exp::Blank().Matches(INPUT))
      INPUT.eat(1);

    // break on newline or comment
    if (!INPUT || Exp::Break().Matches(INPUT) || Exp::Comment().Matches(INPUT))
      break;

    // now read parameter
    std::string param;
    while (INPUT && !Exp::BlankOrBreak().Matches(INPUT))
      param += INPUT.get();

    token.params.push_back(param);
  }

  m_tokens.push(token);
}

// DocStart
void Scanner::ScanDocStart() {
  PopAllIndents();
  PopAllSimpleKeys();
  m_simpleKeyAllowed = false;
  m_canBeJSONFlow = false;

  // eat
  Mark mark = INPUT.mark();
  INPUT.eat(3);
  m_tokens.push(Token(Token::DOC_START, mark));
}

// DocEnd
void Scanner::ScanDocEnd() {
  PopAllIndents();
  PopAllSimpleKeys();
  m_simpleKeyAllowed = false;
  m_canBeJSONFlow = false;

  // eat
  Mark mark = INPUT.mark();
  INPUT.eat(3);
  m_tokens.push(Token(Token::DOC_END, mark));
}

// FlowStart
void Scanner::ScanFlowStart() {
  // flows can be simple keys
  InsertPotentialSimpleKey();
  m_simpleKeyAllowed = true;
  m_canBeJSONFlow = false;

  // eat
  Mark mark = INPUT.mark();
  char ch = INPUT.get();
  FLOW_MARKER flowType = (ch == Keys::FlowSeqStart ? FLOW_SEQ : FLOW_MAP);
  m_flows.push(flowType);
  Token::TYPE type =
      (flowType == FLOW_SEQ ? Token::FLOW_SEQ_START : Token::FLOW_MAP_START);
  m_tokens.push(Token(type, mark));
}

// FlowEnd
void Scanner::ScanFlowEnd() {
  if (InBlockContext())
    throw ParserException(INPUT.mark(), ErrorMsg::FLOW_END);

  // we might have a solo entry in the flow context
  if (InFlowContext()) {
    if (m_flows.top() == FLOW_MAP && VerifySimpleKey())
      m_tokens.push(Token(Token::VALUE, INPUT.mark()));
    else if (m_flows.top() == FLOW_SEQ)
      InvalidateSimpleKey();
  }

  m_simpleKeyAllowed = false;
  m_canBeJSONFlow = true;

  // eat
  Mark mark = INPUT.mark();
  char ch = INPUT.get();

  // check that it matches the start
  FLOW_MARKER flowType = (ch == Keys::FlowSeqEnd ? FLOW_SEQ : FLOW_MAP);
  if (m_flows.top() != flowType)
    throw ParserException(mark, ErrorMsg::FLOW_END);
  m_flows.pop();

  Token::TYPE type = (flowType ? Token::FLOW_SEQ_END : Token::FLOW_MAP_END);
  m_tokens.push(Token(type, mark));
}

// FlowEntry
void Scanner::ScanFlowEntry() {
  // we might have a solo entry in the flow context
  if (InFlowContext()) {
    if (m_flows.top() == FLOW_MAP && VerifySimpleKey())
      m_tokens.push(Token(Token::VALUE, INPUT.mark()));
    else if (m_flows.top() == FLOW_SEQ)
      InvalidateSimpleKey();
  }

  m_simpleKeyAllowed = true;
  m_canBeJSONFlow = false;

  // eat
  Mark mark = INPUT.mark();
  INPUT.eat(1);
  m_tokens.push(Token(Token::FLOW_ENTRY, mark));
}

// BlockEntry
void Scanner::ScanBlockEntry() {
  // we better be in the block context!
  if (InFlowContext())
    throw ParserException(INPUT.mark(), ErrorMsg::BLOCK_ENTRY);

  // can we put it here?
  if (!m_simpleKeyAllowed)
    throw ParserException(INPUT.mark(), ErrorMsg::BLOCK_ENTRY);

  PushIndentTo(INPUT.column(), IndentMarker::SEQ);
  m_simpleKeyAllowed = true;
  m_canBeJSONFlow = false;

  // eat
  Mark mark = INPUT.mark();
  INPUT.eat(1);
  m_tokens.push(Token(Token::BLOCK_ENTRY, mark));
}

// Key
void Scanner::ScanKey() {
  // handle keys diffently in the block context (and manage indents)
  if (InBlockContext()) {
    if (!m_simpleKeyAllowed)
      throw ParserException(INPUT.mark(), ErrorMsg::MAP_KEY);

    PushIndentTo(INPUT.column(), IndentMarker::MAP);
  }

  // can only put a simple key here if we're in block context
  m_simpleKeyAllowed = InBlockContext();

  // eat
  Mark mark = INPUT.mark();
  INPUT.eat(1);
  m_tokens.push(Token(Token::KEY, mark));
}

// Value
void Scanner::ScanValue() {
  // and check that simple key
  bool isSimpleKey = VerifySimpleKey();
  m_canBeJSONFlow = false;

  if (isSimpleKey) {
    // can't follow a simple key with another simple key (dunno why, though - it
    // seems fine)
    m_simpleKeyAllowed = false;
  } else {
    // handle values diffently in the block context (and manage indents)
    if (InBlockContext()) {
      if (!m_simpleKeyAllowed)
        throw ParserException(INPUT.mark(), ErrorMsg::MAP_VALUE);

      PushIndentTo(INPUT.column(), IndentMarker::MAP);
    }

    // can only put a simple key here if we're in block context
    m_simpleKeyAllowed = InBlockContext();
  }

  // eat
  Mark mark = INPUT.mark();
  INPUT.eat(1);
  m_tokens.push(Token(Token::VALUE, mark));
}

// AnchorOrAlias
void Scanner::ScanAnchorOrAlias() {
  bool alias;
  std::string name;

  // insert a potential simple key
  InsertPotentialSimpleKey();
  m_simpleKeyAllowed = false;
  m_canBeJSONFlow = false;

  // eat the indicator
  Mark mark = INPUT.mark();
  char indicator = INPUT.get();
  alias = (indicator == Keys::Alias);

  // now eat the content
  while (INPUT && Exp::Anchor().Matches(INPUT))
    name += INPUT.get();

  // we need to have read SOMETHING!
  if (name.empty())
    throw ParserException(INPUT.mark(), alias ? ErrorMsg::ALIAS_NOT_FOUND
                                              : ErrorMsg::ANCHOR_NOT_FOUND);

  // and needs to end correctly
  if (INPUT && !Exp::AnchorEnd().Matches(INPUT))
    throw ParserException(INPUT.mark(), alias ? ErrorMsg::CHAR_IN_ALIAS
                                              : ErrorMsg::CHAR_IN_ANCHOR);

  // and we're done
  Token token(alias ? Token::ALIAS : Token::ANCHOR, mark);
  token.value = name;
  m_tokens.push(token);
}

// Tag
void Scanner::ScanTag() {
  // insert a potential simple key
  InsertPotentialSimpleKey();
  m_simpleKeyAllowed = false;
  m_canBeJSONFlow = false;

  Token token(Token::TAG, INPUT.mark());

  // eat the indicator
  INPUT.get();

  if (INPUT && INPUT.peek() == Keys::VerbatimTagStart) {
    std::string tag = ScanVerbatimTag(INPUT);

    token.value = tag;
    token.data = Tag::VERBATIM;
  } else {
    bool canBeHandle;
    token.value = ScanTagHandle(INPUT, canBeHandle);
    if (!canBeHandle && token.value.empty())
      token.data = Tag::NON_SPECIFIC;
    else if (token.value.empty())
      token.data = Tag::SECONDARY_HANDLE;
    else
      token.data = Tag::PRIMARY_HANDLE;

    // is there a suffix?
    if (canBeHandle && INPUT.peek() == Keys::Tag) {
      // eat the indicator
      INPUT.get();
      token.params.push_back(ScanTagSuffix(INPUT));
      token.data = Tag::NAMED_HANDLE;
    }
  }

  m_tokens.push(token);
}

// PlainScalar
void Scanner::ScanPlainScalar() {
  std::string scalar;

  // set up the scanning parameters
  ScanScalarParams params;
  params.end = (InFlowContext() ? Exp::EndScalarInFlow() : Exp::EndScalar()) ||
               (Exp::BlankOrBreak() + Exp::Comment());
  params.eatEnd = false;
  params.indent = (InFlowContext() ? 0 : GetTopIndent() + 1);
  params.fold = FOLD_FLOW;
  params.eatLeadingWhitespace = true;
  params.trimTrailingSpaces = true;
  params.chomp = STRIP;
  params.onDocIndicator = BREAK;
  params.onTabInIndentation = THROW;

  // insert a potential simple key
  InsertPotentialSimpleKey();

  Mark mark = INPUT.mark();
  scalar = ScanScalar(INPUT, params);

  // can have a simple key only if we ended the scalar by starting a new line
  m_simpleKeyAllowed = params.leadingSpaces;
  m_canBeJSONFlow = false;

  // finally, check and see if we ended on an illegal character
  // if(Exp::IllegalCharInScalar.Matches(INPUT))
  //   throw ParserException(INPUT.mark(), ErrorMsg::CHAR_IN_SCALAR);

  Token token(Token::PLAIN_SCALAR, mark);
  token.value = scalar;
  m_tokens.push(token);
}

// QuotedScalar
void Scanner::ScanQuotedScalar() {
  std::string scalar;

  // peek at single or double quote (don't eat because we need to preserve (for
  // the time being) the input position)
  char quote = INPUT.peek();
  bool single = (quote == '\'');

  // setup the scanning parameters
  ScanScalarParams params;
  params.end = (single ? RegEx(quote) && !Exp::EscSingleQuote() : RegEx(quote));
  params.eatEnd = true;
  params.escape = (single ? '\'' : '\\');
  params.indent = 0;
  params.fold = FOLD_FLOW;
  params.eatLeadingWhitespace = true;
  params.trimTrailingSpaces = false;
  params.chomp = CLIP;
  params.onDocIndicator = THROW;

  // insert a potential simple key
  InsertPotentialSimpleKey();

  Mark mark = INPUT.mark();

  // now eat that opening quote
  INPUT.get();

  // and scan
  scalar = ScanScalar(INPUT, params);
  m_simpleKeyAllowed = false;
  m_canBeJSONFlow = true;

  Token token(Token::NON_PLAIN_SCALAR, mark);
  token.value = scalar;
  m_tokens.push(token);
}

// BlockScalarToken
// . These need a little extra processing beforehand.
// . We need to scan the line where the indicator is (this doesn't count as
//   part of the scalar),
//   and then we need to figure out what level of indentation we'll be using.
void Scanner::ScanBlockScalar() {
  std::string scalar;

  ScanScalarParams params;
  params.indent = 1;
  params.detectIndent = true;

  // eat block indicator ('|' or '>')
  Mark mark = INPUT.mark();
  char indicator = INPUT.get();
  params.fold = (indicator == Keys::FoldedScalar ? FOLD_BLOCK : DONT_FOLD);

  // eat chomping/indentation indicators
  params.chomp = CLIP;
  int n = Exp::Chomp().Match(INPUT);
  for (int i = 0; i < n; i++) {
    char ch = INPUT.get();
    if (ch == '+')
      params.chomp = KEEP;
    else if (ch == '-')
      params.chomp = STRIP;
    else if (Exp::Digit().Matches(ch)) {
      if (ch == '0')
        throw ParserException(INPUT.mark(), ErrorMsg::ZERO_INDENT_IN_BLOCK);

      params.indent = ch - '0';
      params.detectIndent = false;
    }
  }

  // now eat whitespace
  while (Exp::Blank().Matches(INPUT))
    INPUT.eat(1);

  // and comments to the end of the line
  if (Exp::Comment().Matches(INPUT))
    while (INPUT && !Exp::Break().Matches(INPUT))
      INPUT.eat(1);

  // if it's not a line break, then we ran into a bad character inline
  if (INPUT && !Exp::Break().Matches(INPUT))
    throw ParserException(INPUT.mark(), ErrorMsg::CHAR_IN_BLOCK);

  // set the initial indentation
  if (GetTopIndent() >= 0)
    params.indent += GetTopIndent();

  params.eatLeadingWhitespace = false;
  params.trimTrailingSpaces = false;
  params.onTabInIndentation = THROW;

  scalar = ScanScalar(INPUT, params);

  // simple keys always ok after block scalars (since we're gonna start a new
  // line anyways)
  m_simpleKeyAllowed = true;
  m_canBeJSONFlow = false;

  Token token(Token::NON_PLAIN_SCALAR, mark);
  token.value = scalar;
  m_tokens.push(token);
}
}
src/setting.h @ 3355bbb3

#ifndef SETTING_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define SETTING_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) ||                                            \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include <memory>
#include <vector>
#include "yaml-cpp/noncopyable.h"

namespace YAML {
class SettingChangeBase;

template <typename T>
class Setting {
 public:
  Setting() : m_value() {}

  const T get() const { return m_value; }
  std::auto_ptr<SettingChangeBase> set(const T& value);
  void restore(const Setting<T>& oldSetting) { m_value = oldSetting.get(); }

 private:
  T m_value;
};

class SettingChangeBase {
 public:
  virtual ~SettingChangeBase() {}
  virtual void pop() = 0;
};

template <typename T>
class SettingChange : public SettingChangeBase {
 public:
  SettingChange(Setting<T>* pSetting) : m_pCurSetting(pSetting) {
    // copy old setting to save its state
    m_oldSetting = *pSetting;
  }

  virtual void pop() { m_pCurSetting->restore(m_oldSetting); }

 private:
  Setting<T>* m_pCurSetting;
  Setting<T> m_oldSetting;
};

template <typename T>
inline std::auto_ptr<SettingChangeBase> Setting<T>::set(const T& value) {
  std::auto_ptr<SettingChangeBase> pChange(new SettingChange<T>(this));
  m_value = value;
  return pChange;
}

class SettingChanges : private noncopyable {
 public:
  SettingChanges() {}
  ~SettingChanges() { clear(); }

  void clear() {
    restore();

    for (setting_changes::const_iterator it = m_settingChanges.begin();
         it != m_settingChanges.end(); ++it)
      delete *it;
    m_settingChanges.clear();
  }

  void restore() {
    for (setting_changes::const_iterator it = m_settingChanges.begin();
         it != m_settingChanges.end(); ++it)
      (*it)->pop();
  }

  void push(std::auto_ptr<SettingChangeBase> pSettingChange) {
    m_settingChanges.push_back(pSettingChange.release());
  }

  // like std::auto_ptr - assignment is transfer of ownership
  SettingChanges& operator=(SettingChanges& rhs) {
    if (this == &rhs)
      return *this;

    clear();
    m_settingChanges = rhs.m_settingChanges;
    rhs.m_settingChanges.clear();
    return *this;
  }

 private:
  typedef std::vector<SettingChangeBase*> setting_changes;
  setting_changes m_settingChanges;
};
}

#endif  // SETTING_H_62B23520_7C8E_11DE_8A39_0800200C9A66
src/simplekey.cpp
View file @
3355bbb3
...
@@ -3,137 +3,126 @@
#include "yaml-cpp/exceptions.h"
#include "exp.h"

namespace YAML {
Scanner::SimpleKey::SimpleKey(const Mark& mark_, int flowLevel_)
    : mark(mark_), flowLevel(flowLevel_), pIndent(0), pMapStart(0), pKey(0) {}

void Scanner::SimpleKey::Validate() {
  // Note: pIndent will *not* be garbage here;
  // we "garbage collect" them so we can
  // always refer to them
  if (pIndent)
    pIndent->status = IndentMarker::VALID;
  if (pMapStart)
    pMapStart->status = Token::VALID;
  if (pKey)
    pKey->status = Token::VALID;
}

void Scanner::SimpleKey::Invalidate() {
  if (pIndent)
    pIndent->status = IndentMarker::INVALID;
  if (pMapStart)
    pMapStart->status = Token::INVALID;
  if (pKey)
    pKey->status = Token::INVALID;
}

// CanInsertPotentialSimpleKey
bool Scanner::CanInsertPotentialSimpleKey() const {
  if (!m_simpleKeyAllowed)
    return false;

  return !ExistsActiveSimpleKey();
}

// ExistsActiveSimpleKey
// . Returns true if there's a potential simple key at our flow level
//   (there's allowed at most one per flow level, i.e., at the start of the
//   flow start token)
bool Scanner::ExistsActiveSimpleKey() const {
  if (m_simpleKeys.empty())
    return false;

  const SimpleKey& key = m_simpleKeys.top();
  return key.flowLevel == GetFlowLevel();
}

// InsertPotentialSimpleKey
// . If we can, add a potential simple key to the queue,
//   and save it on a stack.
void Scanner::InsertPotentialSimpleKey() {
  if (!CanInsertPotentialSimpleKey())
    return;

  SimpleKey key(INPUT.mark(), GetFlowLevel());

  // first add a map start, if necessary
  if (InBlockContext()) {
    key.pIndent = PushIndentTo(INPUT.column(), IndentMarker::MAP);
    if (key.pIndent) {
      key.pIndent->status = IndentMarker::UNKNOWN;
      key.pMapStart = key.pIndent->pStartToken;
      key.pMapStart->status = Token::UNVERIFIED;
    }
  }

  // then add the (now unverified) key
  m_tokens.push(Token(Token::KEY, INPUT.mark()));
  key.pKey = &m_tokens.back();
  key.pKey->status = Token::UNVERIFIED;

  m_simpleKeys.push(key);
}

// InvalidateSimpleKey
// . Automatically invalidate the simple key in our flow level
void Scanner::InvalidateSimpleKey() {
  if (m_simpleKeys.empty())
    return;

  // grab top key
  SimpleKey& key = m_simpleKeys.top();
  if (key.flowLevel != GetFlowLevel())
    return;

  key.Invalidate();
  m_simpleKeys.pop();
}

// VerifySimpleKey
// . Determines whether the latest simple key to be added is valid,
//   and if so, makes it valid.
bool Scanner::VerifySimpleKey() {
  if (m_simpleKeys.empty())
    return false;

  // grab top key
  SimpleKey key = m_simpleKeys.top();

  // only validate if we're in the correct flow level
  if (key.flowLevel != GetFlowLevel())
    return false;

  m_simpleKeys.pop();

  bool isValid = true;

  // needs to be less than 1024 characters and inline
  if (INPUT.line() != key.mark.line || INPUT.pos() - key.mark.pos > 1024)
    isValid = false;

  // invalidate key
  if (isValid)
    key.Validate();
  else
    key.Invalidate();

  return isValid;
}

void Scanner::PopAllSimpleKeys() {
  while (!m_simpleKeys.empty())
    m_simpleKeys.pop();
}
}
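VerifySimpleKey above accepts a pending simple key only if the scanner is still on the same line as the key's mark and has consumed at most 1024 characters since it (the YAML simple-key restriction). A small standalone sketch of that predicate, with made-up type and function names:

// Illustrative sketch (not part of yaml-cpp): the validity rule applied by
// VerifySimpleKey, expressed as a standalone predicate.
#include <iostream>

struct PositionLike {
  int pos;
  int line;
};

// A simple key stays valid only while the scanner is on the same line as the
// key's mark and has consumed at most 1024 characters since it.
bool SimpleKeyStillValid(const PositionLike& keyMark, const PositionLike& now) {
  return now.line == keyMark.line && now.pos - keyMark.pos <= 1024;
}

int main() {
  PositionLike key = {0, 3};
  PositionLike sameLine = {100, 3};
  PositionLike nextLine = {100, 4};
  std::cout << SimpleKeyStillValid(key, sameLine) << "\n";  // 1
  std::cout << SimpleKeyStillValid(key, nextLine) << "\n";  // 0
  return 0;
}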
src/singledocparser.cpp
View file @
3355bbb3
...
@@ -10,385 +10,394 @@
#include <cstdio>
#include <algorithm>

namespace YAML {
SingleDocParser::SingleDocParser(Scanner& scanner, const Directives& directives)
    : m_scanner(scanner),
      m_directives(directives),
      m_pCollectionStack(new CollectionStack),
      m_curAnchor(0) {}

SingleDocParser::~SingleDocParser() {}

// HandleDocument
// . Handles the next document
// . Throws a ParserException on error.
void SingleDocParser::HandleDocument(EventHandler& eventHandler) {
  assert(!m_scanner.empty());  // guaranteed that there are tokens
  assert(!m_curAnchor);

  eventHandler.OnDocumentStart(m_scanner.peek().mark);

  // eat doc start
  if (m_scanner.peek().type == Token::DOC_START)
    m_scanner.pop();

  // recurse!
  HandleNode(eventHandler);

  eventHandler.OnDocumentEnd();

  // and finally eat any doc ends we see
  while (!m_scanner.empty() && m_scanner.peek().type == Token::DOC_END)
    m_scanner.pop();
}

void SingleDocParser::HandleNode(EventHandler& eventHandler) {
  // an empty node *is* a possibility
  if (m_scanner.empty()) {
    eventHandler.OnNull(m_scanner.mark(), NullAnchor);
    return;
  }

  // save location
  Mark mark = m_scanner.peek().mark;

  // special case: a value node by itself must be a map, with no header
  if (m_scanner.peek().type == Token::VALUE) {
    eventHandler.OnMapStart(mark, "?", NullAnchor);
    HandleMap(eventHandler);
    eventHandler.OnMapEnd();
    return;
  }

  // special case: an alias node
  if (m_scanner.peek().type == Token::ALIAS) {
    eventHandler.OnAlias(mark, LookupAnchor(mark, m_scanner.peek().value));
    m_scanner.pop();
    return;
  }

  std::string tag;
  anchor_t anchor;
  ParseProperties(tag, anchor);

  const Token& token = m_scanner.peek();

  if (token.type == Token::PLAIN_SCALAR && token.value == "null") {
    eventHandler.OnNull(mark, anchor);
    m_scanner.pop();
    return;
  }

  // add non-specific tags
  if (tag.empty())
    tag = (token.type == Token::NON_PLAIN_SCALAR ? "!" : "?");

  // now split based on what kind of node we should be
  switch (token.type) {
    case Token::PLAIN_SCALAR:
    case Token::NON_PLAIN_SCALAR:
      eventHandler.OnScalar(mark, tag, anchor, token.value);
      m_scanner.pop();
      return;
    case Token::FLOW_SEQ_START:
    case Token::BLOCK_SEQ_START:
      eventHandler.OnSequenceStart(mark, tag, anchor);
      HandleSequence(eventHandler);
      eventHandler.OnSequenceEnd();
      return;
    case Token::FLOW_MAP_START:
    case Token::BLOCK_MAP_START:
      eventHandler.OnMapStart(mark, tag, anchor);
      HandleMap(eventHandler);
      eventHandler.OnMapEnd();
      return;
    case Token::KEY:
      // compact maps can only go in a flow sequence
      if (m_pCollectionStack->GetCurCollectionType() ==
          CollectionType::FlowSeq) {
        eventHandler.OnMapStart(mark, tag, anchor);
        HandleMap(eventHandler);
        eventHandler.OnMapEnd();
        return;
      }
      break;
    default:
      break;
  }

  if (tag == "?")
    eventHandler.OnNull(mark, anchor);
  else
    eventHandler.OnScalar(mark, tag, anchor, "");
}

void SingleDocParser::HandleSequence(EventHandler& eventHandler) {
  // split based on start token
  switch (m_scanner.peek().type) {
    case Token::BLOCK_SEQ_START:
      HandleBlockSequence(eventHandler);
      break;
    case Token::FLOW_SEQ_START:
      HandleFlowSequence(eventHandler);
      break;
    default:
      break;
  }
}

void SingleDocParser::HandleBlockSequence(EventHandler& eventHandler) {
  // eat start token
  m_scanner.pop();
  m_pCollectionStack->PushCollectionType(CollectionType::BlockSeq);

  while (1) {
    if (m_scanner.empty())
      throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_SEQ);

    Token token = m_scanner.peek();
    if (token.type != Token::BLOCK_ENTRY && token.type != Token::BLOCK_SEQ_END)
      throw ParserException(token.mark, ErrorMsg::END_OF_SEQ);

    m_scanner.pop();
    if (token.type == Token::BLOCK_SEQ_END)
      break;

    // check for null
    if (!m_scanner.empty()) {
      const Token& token = m_scanner.peek();
      if (token.type == Token::BLOCK_ENTRY ||
          token.type == Token::BLOCK_SEQ_END) {
        eventHandler.OnNull(token.mark, NullAnchor);
        continue;
      }
    }

    HandleNode(eventHandler);
  }

  m_pCollectionStack->PopCollectionType(CollectionType::BlockSeq);
}

void SingleDocParser::HandleFlowSequence(EventHandler& eventHandler) {
  // eat start token
  m_scanner.pop();
  m_pCollectionStack->PushCollectionType(CollectionType::FlowSeq);

  while (1) {
    if (m_scanner.empty())
      throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_SEQ_FLOW);

    // first check for end
    if (m_scanner.peek().type == Token::FLOW_SEQ_END) {
      m_scanner.pop();
      break;
    }

    // then read the node
    HandleNode(eventHandler);

    if (m_scanner.empty())
      throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_SEQ_FLOW);

    // now eat the separator (or could be a sequence end, which we ignore - but
    // if it's neither, then it's a bad node)
    Token& token = m_scanner.peek();
    if (token.type == Token::FLOW_ENTRY)
      m_scanner.pop();
    else if (token.type != Token::FLOW_SEQ_END)
      throw ParserException(token.mark, ErrorMsg::END_OF_SEQ_FLOW);
  }

  m_pCollectionStack->PopCollectionType(CollectionType::FlowSeq);
}

void SingleDocParser::HandleMap(EventHandler& eventHandler) {
  // split based on start token
  switch (m_scanner.peek().type) {
    case Token::BLOCK_MAP_START:
      HandleBlockMap(eventHandler);
      break;
    case Token::FLOW_MAP_START:
      HandleFlowMap(eventHandler);
      break;
    case Token::KEY:
      HandleCompactMap(eventHandler);
      break;
    case Token::VALUE:
      HandleCompactMapWithNoKey(eventHandler);
      break;
    default:
      break;
  }
}

void SingleDocParser::HandleBlockMap(EventHandler& eventHandler) {
  // eat start token
  m_scanner.pop();
  m_pCollectionStack->PushCollectionType(CollectionType::BlockMap);

  while (1) {
    if (m_scanner.empty())
      throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_MAP);

    Token token = m_scanner.peek();
    if (token.type != Token::KEY && token.type != Token::VALUE &&
        token.type != Token::BLOCK_MAP_END)
      throw ParserException(token.mark, ErrorMsg::END_OF_MAP);

    if (token.type == Token::BLOCK_MAP_END) {
      m_scanner.pop();
      break;
    }

    // grab key (if non-null)
    if (token.type == Token::KEY) {
      m_scanner.pop();
      HandleNode(eventHandler);
    } else {
      eventHandler.OnNull(token.mark, NullAnchor);
    }

    // now grab value (optional)
    if (!m_scanner.empty() && m_scanner.peek().type == Token::VALUE) {
      m_scanner.pop();
      HandleNode(eventHandler);
    } else {
      eventHandler.OnNull(token.mark, NullAnchor);
    }
  }

  m_pCollectionStack->PopCollectionType(CollectionType::BlockMap);
}

void SingleDocParser::HandleFlowMap(EventHandler& eventHandler) {
  // eat start token
  m_scanner.pop();
  m_pCollectionStack->PushCollectionType(CollectionType::FlowMap);

  while (1) {
    if (m_scanner.empty())
      throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_MAP_FLOW);

    Token& token = m_scanner.peek();
    const Mark mark = token.mark;

    // first check for end
    if (token.type == Token::FLOW_MAP_END) {
      m_scanner.pop();
      break;
    }

    // grab key (if non-null)
    if (token.type == Token::KEY) {
      m_scanner.pop();
      HandleNode(eventHandler);
    } else {
      eventHandler.OnNull(mark, NullAnchor);
    }

    // now grab value (optional)
    if (!m_scanner.empty() && m_scanner.peek().type == Token::VALUE) {
      m_scanner.pop();
      HandleNode(eventHandler);
    } else {
      eventHandler.OnNull(mark, NullAnchor);
    }

    if (m_scanner.empty())
      throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_MAP_FLOW);

    // now eat the separator (or could be a map end, which we ignore - but if
    // it's neither, then it's a bad node)
    Token& nextToken = m_scanner.peek();
    if (nextToken.type == Token::FLOW_ENTRY)
      m_scanner.pop();
    else if (nextToken.type != Token::FLOW_MAP_END)
      throw ParserException(nextToken.mark, ErrorMsg::END_OF_MAP_FLOW);
  }

  m_pCollectionStack->PopCollectionType(CollectionType::FlowMap);
}

// . Single "key: value" pair in a flow sequence
void SingleDocParser::HandleCompactMap(EventHandler& eventHandler) {
  m_pCollectionStack->PushCollectionType(CollectionType::CompactMap);

  // grab key
  Mark mark = m_scanner.peek().mark;
  m_scanner.pop();
  HandleNode(eventHandler);

  // now grab value (optional)
  if (!m_scanner.empty() && m_scanner.peek().type == Token::VALUE) {
    m_scanner.pop();
    HandleNode(eventHandler);
  } else {
    eventHandler.OnNull(mark, NullAnchor);
  }

  m_pCollectionStack->PopCollectionType(CollectionType::CompactMap);
}

// . Single ": value" pair in a flow sequence
void SingleDocParser::HandleCompactMapWithNoKey(EventHandler& eventHandler) {
  m_pCollectionStack->PushCollectionType(CollectionType::CompactMap);

  // null key
  eventHandler.OnNull(m_scanner.peek().mark, NullAnchor);

  // grab value
  m_scanner.pop();
  HandleNode(eventHandler);

  m_pCollectionStack->PopCollectionType(CollectionType::CompactMap);
}

// ParseProperties
// . Grabs any tag or anchor tokens and deals with them.
void SingleDocParser::ParseProperties(std::string& tag, anchor_t& anchor) {
  tag.clear();
  anchor = NullAnchor;

  while (1) {
    if (m_scanner.empty())
      return;

    switch (m_scanner.peek().type) {
      case Token::TAG:
        ParseTag(tag);
        break;
      case Token::ANCHOR:
        ParseAnchor(anchor);
        break;
      default:
        return;
    }
  }
}

void SingleDocParser::ParseTag(std::string& tag) {
  Token& token = m_scanner.peek();
  if (!tag.empty())
    throw ParserException(token.mark, ErrorMsg::MULTIPLE_TAGS);

  Tag tagInfo(token);
  tag = tagInfo.Translate(m_directives);
  m_scanner.pop();
}

void SingleDocParser::ParseAnchor(anchor_t& anchor) {
  Token& token = m_scanner.peek();
  if (anchor)
    throw ParserException(token.mark, ErrorMsg::MULTIPLE_ANCHORS);

  anchor = RegisterAnchor(token.value);
  m_scanner.pop();
}

anchor_t SingleDocParser::RegisterAnchor(const std::string& name) {
  if (name.empty())
    return NullAnchor;

  return m_anchors[name] = ++m_curAnchor;
}

anchor_t SingleDocParser::LookupAnchor(const Mark& mark,
                                       const std::string& name) const {
  Anchors::const_iterator it = m_anchors.find(name);
  if (it == m_anchors.end())
    throw ParserException(mark, ErrorMsg::UNKNOWN_ANCHOR);

  return it->second;
}
}
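RegisterAnchor and LookupAnchor above keep a map from anchor names to monotonically increasing ids, with zero reserved for the null anchor, and a failed lookup is a parse error. A self-contained sketch of the same bookkeeping, using invented names rather than the parser's own types:

// Illustrative sketch (not part of yaml-cpp): anchor bookkeeping reduced to a
// standalone program.
#include <iostream>
#include <map>
#include <stdexcept>
#include <string>

typedef unsigned anchor_id;

class AnchorTable {
 public:
  AnchorTable() : m_cur(0) {}

  // Each new anchor name gets the next id; re-registering a name rebinds it,
  // which is how a later "&anchor" shadows an earlier one.
  anchor_id Register(const std::string& name) {
    if (name.empty())
      return 0;  // the "null" anchor
    return m_anchors[name] = ++m_cur;
  }

  anchor_id Lookup(const std::string& name) const {
    std::map<std::string, anchor_id>::const_iterator it = m_anchors.find(name);
    if (it == m_anchors.end())
      throw std::runtime_error("unknown anchor: " + name);
    return it->second;
  }

 private:
  std::map<std::string, anchor_id> m_anchors;
  anchor_id m_cur;
};

int main() {
  AnchorTable table;
  table.Register("base");
  table.Register("other");
  std::cout << table.Lookup("base") << "\n";   // 1
  std::cout << table.Lookup("other") << "\n";  // 2
  return 0;
}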
src/singledocparser.h
View file @
3355bbb3
#ifndef SINGLEDOCPARSER_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define SINGLEDOCPARSER_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) || \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include "yaml-cpp/anchor.h"
#include "yaml-cpp/noncopyable.h"
#include <string>
#include <map>
#include <memory>

namespace YAML {
struct Directives;
struct Mark;
struct Token;
class CollectionStack;
class EventHandler;
class Node;
class Scanner;

class SingleDocParser : private noncopyable {
 public:
  SingleDocParser(Scanner& scanner, const Directives& directives);
  ~SingleDocParser();

  void HandleDocument(EventHandler& eventHandler);

 private:
  void HandleNode(EventHandler& eventHandler);

  void HandleSequence(EventHandler& eventHandler);
  void HandleBlockSequence(EventHandler& eventHandler);
  void HandleFlowSequence(EventHandler& eventHandler);

  void HandleMap(EventHandler& eventHandler);
  void HandleBlockMap(EventHandler& eventHandler);
  void HandleFlowMap(EventHandler& eventHandler);
  void HandleCompactMap(EventHandler& eventHandler);
  void HandleCompactMapWithNoKey(EventHandler& eventHandler);

  void ParseProperties(std::string& tag, anchor_t& anchor);
  void ParseTag(std::string& tag);
  void ParseAnchor(anchor_t& anchor);

  anchor_t RegisterAnchor(const std::string& name);
  anchor_t LookupAnchor(const Mark& mark, const std::string& name) const;

 private:
  Scanner& m_scanner;
  const Directives& m_directives;
  std::auto_ptr<CollectionStack> m_pCollectionStack;

  typedef std::map<std::string, anchor_t> Anchors;
  Anchors m_anchors;

  anchor_t m_curAnchor;
};
}

#endif  // SINGLEDOCPARSER_H_62B23520_7C8E_11DE_8A39_0800200C9A66
src/stream.cpp
View file @
3355bbb3
...
@@ -6,442 +6,439 @@
#define YAML_PREFETCH_SIZE 2048
#endif

#define S_ARRAY_SIZE(A) (sizeof(A) / sizeof(*(A)))
#define S_ARRAY_END(A) ((A) + S_ARRAY_SIZE(A))

#define CP_REPLACEMENT_CHARACTER (0xFFFD)

namespace YAML {
enum UtfIntroState {
  uis_start,
  uis_utfbe_b1,
  uis_utf32be_b2,
  uis_utf32be_bom3,
  uis_utf32be,
  uis_utf16be,
  uis_utf16be_bom1,
  uis_utfle_bom1,
  uis_utf16le_bom2,
  uis_utf32le_bom3,
  uis_utf16le,
  uis_utf32le,
  uis_utf8_imp,
  uis_utf16le_imp,
  uis_utf32le_imp3,
  uis_utf8_bom1,
  uis_utf8_bom2,
  uis_utf8,
  uis_error
};

enum UtfIntroCharType {
  uict00,
  uictBB,
  uictBF,
  uictEF,
  uictFE,
  uictFF,
  uictAscii,
  uictOther,
  uictMax
};

static bool s_introFinalState[] = {
    false,  // uis_start
    false,  // uis_utfbe_b1
    false,  // uis_utf32be_b2
    false,  // uis_utf32be_bom3
    true,   // uis_utf32be
    true,   // uis_utf16be
    false,  // uis_utf16be_bom1
    false,  // uis_utfle_bom1
    false,  // uis_utf16le_bom2
    false,  // uis_utf32le_bom3
    true,   // uis_utf16le
    true,   // uis_utf32le
    false,  // uis_utf8_imp
    false,  // uis_utf16le_imp
    false,  // uis_utf32le_imp3
    false,  // uis_utf8_bom1
    false,  // uis_utf8_bom2
    true,   // uis_utf8
    true,   // uis_error
};

static UtfIntroState s_introTransitions[][uictMax] = {
    // uict00, uictBB, uictBF, uictEF, uictFE, uictFF, uictAscii, uictOther
    {uis_utfbe_b1, uis_utf8, uis_utf8, uis_utf8_bom1, uis_utf16be_bom1,
     uis_utfle_bom1, uis_utf8_imp, uis_utf8},
    {uis_utf32be_b2, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8,
     uis_utf16be, uis_utf8},
    {uis_utf32be, uis_utf8, uis_utf8, uis_utf8, uis_utf32be_bom3, uis_utf8,
     uis_utf8, uis_utf8},
    {uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf32be, uis_utf8,
     uis_utf8},
    {uis_utf32be, uis_utf32be, uis_utf32be, uis_utf32be, uis_utf32be,
     uis_utf32be, uis_utf32be, uis_utf32be},
    {uis_utf16be, uis_utf16be, uis_utf16be, uis_utf16be, uis_utf16be,
     uis_utf16be, uis_utf16be, uis_utf16be},
    {uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf16be, uis_utf8,
     uis_utf8},
    {uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf16le_bom2, uis_utf8,
     uis_utf8, uis_utf8},
    {uis_utf32le_bom3, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le,
     uis_utf16le, uis_utf16le, uis_utf16le},
    {uis_utf32le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le,
     uis_utf16le, uis_utf16le, uis_utf16le},
    {uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le,
     uis_utf16le, uis_utf16le, uis_utf16le},
    {uis_utf32le, uis_utf32le, uis_utf32le, uis_utf32le, uis_utf32le,
     uis_utf32le, uis_utf32le, uis_utf32le},
    {uis_utf16le_imp, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8,
     uis_utf8, uis_utf8},
    {uis_utf32le_imp3, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le,
     uis_utf16le, uis_utf16le, uis_utf16le},
    {uis_utf32le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le,
     uis_utf16le, uis_utf16le, uis_utf16le},
    {uis_utf8, uis_utf8_bom2, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8,
     uis_utf8},
    {uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8,
     uis_utf8},
    {uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8,
     uis_utf8},
};

static char s_introUngetCount[][uictMax] = {
    // uict00, uictBB, uictBF, uictEF, uictFE, uictFF, uictAscii, uictOther
    {0, 1, 1, 0, 0, 0, 0, 1},
    {0, 2, 2, 2, 2, 2, 2, 2},
    {3, 3, 3, 3, 0, 3, 3, 3},
    {4, 4, 4, 4, 4, 0, 4, 4},
    {1, 1, 1, 1, 1, 1, 1, 1},
    {1, 1, 1, 1, 1, 1, 1, 1},
    {2, 2, 2, 2, 2, 0, 2, 2},
    {2, 2, 2, 2, 0, 2, 2, 2},
    {0, 1, 1, 1, 1, 1, 1, 1},
    {0, 2, 2, 2, 2, 2, 2, 2},
    {1, 1, 1, 1, 1, 1, 1, 1},
    {1, 1, 1, 1, 1, 1, 1, 1},
    {0, 2, 2, 2, 2, 2, 2, 2},
    {0, 3, 3, 3, 3, 3, 3, 3},
    {4, 4, 4, 4, 4, 4, 4, 4},
    {2, 0, 2, 2, 2, 2, 2, 2},
    {3, 3, 0, 3, 3, 3, 3, 3},
    {1, 1, 1, 1, 1, 1, 1, 1},
};

inline UtfIntroCharType IntroCharTypeOf(std::istream::int_type ch) {
  if (std::istream::traits_type::eof() == ch) {
    return uictOther;
  }

  switch (ch) {
    case 0:
      return uict00;
    case 0xBB:
      return uictBB;
    case 0xBF:
      return uictBF;
    case 0xEF:
      return uictEF;
    case 0xFE:
      return uictFE;
    case 0xFF:
      return uictFF;
  }

  if ((ch > 0) && (ch < 0xFF)) {
    return uictAscii;
  }

  return uictOther;
}

inline char Utf8Adjust(unsigned long ch, unsigned char lead_bits,
                       unsigned char rshift) {
  const unsigned char header = ((1 << lead_bits) - 1) << (8 - lead_bits);
  const unsigned char mask = (0xFF >> (lead_bits + 1));
  return static_cast<char>(
      static_cast<unsigned char>(header | ((ch >> rshift) & mask)));
}

inline void QueueUnicodeCodepoint(std::deque<char>& q, unsigned long ch) {
  // We are not allowed to queue the Stream::eof() codepoint, so
  // replace it with CP_REPLACEMENT_CHARACTER
  if (static_cast<unsigned long>(Stream::eof()) == ch) {
    ch = CP_REPLACEMENT_CHARACTER;
  }

  if (ch < 0x80) {
    q.push_back(Utf8Adjust(ch, 0, 0));
  } else if (ch < 0x800) {
    q.push_back(Utf8Adjust(ch, 2, 6));
    q.push_back(Utf8Adjust(ch, 1, 0));
  } else if (ch < 0x10000) {
    q.push_back(Utf8Adjust(ch, 3, 12));
    q.push_back(Utf8Adjust(ch, 1, 6));
    q.push_back(Utf8Adjust(ch, 1, 0));
  } else {
    q.push_back(Utf8Adjust(ch, 4, 18));
    q.push_back(Utf8Adjust(ch, 1, 12));
    q.push_back(Utf8Adjust(ch, 1, 6));
    q.push_back(Utf8Adjust(ch, 1, 0));
  }
}

Stream::Stream(std::istream& input)
    : m_input(input),
      m_pPrefetched(new unsigned char[YAML_PREFETCH_SIZE]),
      m_nPrefetchedAvailable(0),
      m_nPrefetchedUsed(0) {
  typedef std::istream::traits_type char_traits;

  if (!input)
    return;

  // Determine (or guess) the character-set by reading the BOM, if any.  See
  // the YAML specification for the determination algorithm.
  char_traits::int_type intro[4];
  int nIntroUsed = 0;
  UtfIntroState state = uis_start;
  for (; !s_introFinalState[state];) {
    std::istream::int_type ch = input.get();
    intro[nIntroUsed++] = ch;
    UtfIntroCharType charType = IntroCharTypeOf(ch);
    UtfIntroState newState = s_introTransitions[state][charType];
    int nUngets = s_introUngetCount[state][charType];
    if (nUngets > 0) {
      input.clear();
      for (; nUngets > 0; --nUngets) {
        if (char_traits::eof() != intro[--nIntroUsed])
          input.putback(char_traits::to_char_type(intro[nIntroUsed]));
      }
    }
    state = newState;
  }

  switch (state) {
    case uis_utf8:
      m_charSet = utf8;
      break;
    case uis_utf16le:
      m_charSet = utf16le;
      break;
    case uis_utf16be:
      m_charSet = utf16be;
      break;
    case uis_utf32le:
      m_charSet = utf32le;
      break;
    case uis_utf32be:
      m_charSet = utf32be;
      break;
    default:
      m_charSet = utf8;
      break;
  }

  ReadAheadTo(0);
}

Stream::~Stream() { delete[] m_pPrefetched; }

char Stream::peek() const {
  if (m_readahead.empty()) {
    return Stream::eof();
  }

  return m_readahead[0];
}

Stream::operator bool() const {
  return m_input.good() ||
         (!m_readahead.empty() && m_readahead[0] != Stream::eof());
}

// get
// . Extracts a character from the stream and updates our position
char Stream::get() {
  char ch = peek();
  AdvanceCurrent();
  m_mark.column++;

  if (ch == '\n') {
    m_mark.column = 0;
    m_mark.line++;
  }

  return ch;
}

// get
// . Extracts 'n' characters from the stream and updates our position
std::string Stream::get(int n) {
  std::string ret;
  ret.reserve(n);
  for (int i = 0; i < n; i++)
    ret += get();
  return ret;
}

// eat
// . Eats 'n' characters and updates our position.
void Stream::eat(int n) {
  for (int i = 0; i < n; i++)
    get();
}

void Stream::AdvanceCurrent() {
  if (!m_readahead.empty()) {
    m_readahead.pop_front();
    m_mark.pos++;
  }

  ReadAheadTo(0);
}

bool Stream::_ReadAheadTo(size_t i) const {
  while (m_input.good() && (m_readahead.size() <= i)) {
    switch (m_charSet) {
      case utf8:
        StreamInUtf8();
        break;
      case utf16le:
        StreamInUtf16();
        break;
      case utf16be:
        StreamInUtf16();
        break;
      case utf32le:
        StreamInUtf32();
        break;
      case utf32be:
        StreamInUtf32();
        break;
    }
  }

  // signal end of stream
  if (!m_input.good())
    m_readahead.push_back(Stream::eof());

  return m_readahead.size() > i;
}

void Stream::StreamInUtf8() const {
  unsigned char b = GetNextByte();
  if (m_input.good()) {
    m_readahead.push_back(b);
  }
}

void Stream::StreamInUtf16() const {
  unsigned long ch = 0;
  unsigned char bytes[2];
  int nBigEnd = (m_charSet == utf16be) ? 0 : 1;

  bytes[0] = GetNextByte();
  bytes[1] = GetNextByte();
  if (!m_input.good()) {
    return;
  }
  ch = (static_cast<unsigned long>(bytes[nBigEnd]) << 8) |
       static_cast<unsigned long>(bytes[1 ^ nBigEnd]);

  if (ch >= 0xDC00 && ch < 0xE000) {
    // Trailing (low) surrogate...ugh, wrong order
    QueueUnicodeCodepoint(m_readahead, CP_REPLACEMENT_CHARACTER);
    return;
  } else if (ch >= 0xD800 && ch < 0xDC00) {
    // ch is a leading (high) surrogate

    // Four byte UTF-8 code point

    // Read the trailing (low) surrogate
    for (;;) {
      bytes[0] = GetNextByte();
      bytes[1] = GetNextByte();
      if (!m_input.good()) {
        QueueUnicodeCodepoint(m_readahead, CP_REPLACEMENT_CHARACTER);
        return;
      }
      unsigned long chLow = (static_cast<unsigned long>(bytes[nBigEnd]) << 8) |
                            static_cast<unsigned long>(bytes[1 ^ nBigEnd]);
      if (chLow < 0xDC00 || ch >= 0xE000) {
        // Trouble...not a low surrogate.  Dump a REPLACEMENT CHARACTER into
        // the stream.
        QueueUnicodeCodepoint(m_readahead, CP_REPLACEMENT_CHARACTER);

        // Deal with the next UTF-16 unit
        if (chLow < 0xD800 || ch >= 0xE000) {
          // Easiest case: queue the codepoint and return
          QueueUnicodeCodepoint(m_readahead, ch);
          return;
        } else {
          // Start the loop over with the new high surrogate
          ch = chLow;
          continue;
        }
      }

      // Select the payload bits from the high surrogate
      ch &= 0x3FF;
      ch <<= 10;

      // Include bits from low surrogate
      ch |= (chLow & 0x3FF);

      // Add the surrogacy offset
      ch += 0x10000;
    }
  }

  QueueUnicodeCodepoint(m_readahead, ch);
}

inline char* ReadBuffer(unsigned char* pBuffer) {
  return reinterpret_cast<char*>(pBuffer);
}

unsigned char Stream::GetNextByte() const {
  if (m_nPrefetchedUsed >= m_nPrefetchedAvailable) {
    std::streambuf* pBuf = m_input.rdbuf();
    m_nPrefetchedAvailable = static_cast<std::size_t>(
        pBuf->sgetn(ReadBuffer(m_pPrefetched), YAML_PREFETCH_SIZE));
    m_nPrefetchedUsed = 0;
    if (!m_nPrefetchedAvailable) {
      m_input.setstate(std::ios_base::eofbit);
    }

    if (0 == m_nPrefetchedAvailable) {
      return 0;
    }
  }

  return m_pPrefetched[m_nPrefetchedUsed++];
}

void Stream::StreamInUtf32() const {
  static int indexes[2][4] = {{3, 2, 1, 0}, {0, 1, 2, 3}};

  unsigned long ch = 0;
  unsigned char bytes[4];
  int* pIndexes = (m_charSet == utf32be) ? indexes[1] : indexes[0];

  bytes[0] = GetNextByte();
  bytes[1] = GetNextByte();
  bytes[2] = GetNextByte();
  bytes[3] = GetNextByte();
  if (!m_input.good()) {
    return;
  }

  for (int i = 0; i < 4; ++i) {
    ch <<= 8;
    ch |= bytes[pIndexes[i]];
  }

  QueueUnicodeCodepoint(m_readahead, ch);
}
}
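Utf8Adjust and QueueUnicodeCodepoint above re-encode every decoded codepoint as UTF-8 for the readahead buffer, building each byte from a length-dependent header and a shifted, masked payload. A standalone sketch of that arithmetic (illustrative only; the helper names are invented): encoding U+00E9 should yield the bytes C3 A9.

// Illustrative sketch (not part of yaml-cpp): the UTF-8 header/mask math used
// by the stream's codepoint queueing, as a standalone program.
#include <cstdio>
#include <deque>

static char Adjust(unsigned long ch, unsigned char lead_bits,
                   unsigned char rshift) {
  const unsigned char header = ((1 << lead_bits) - 1) << (8 - lead_bits);
  const unsigned char mask = (0xFF >> (lead_bits + 1));
  return static_cast<char>(
      static_cast<unsigned char>(header | ((ch >> rshift) & mask)));
}

static void Encode(std::deque<char>& q, unsigned long ch) {
  if (ch < 0x80) {                 // 1-byte sequence
    q.push_back(Adjust(ch, 0, 0));
  } else if (ch < 0x800) {         // 2-byte sequence
    q.push_back(Adjust(ch, 2, 6));
    q.push_back(Adjust(ch, 1, 0));
  } else if (ch < 0x10000) {       // 3-byte sequence
    q.push_back(Adjust(ch, 3, 12));
    q.push_back(Adjust(ch, 1, 6));
    q.push_back(Adjust(ch, 1, 0));
  } else {                         // 4-byte sequence
    q.push_back(Adjust(ch, 4, 18));
    q.push_back(Adjust(ch, 1, 12));
    q.push_back(Adjust(ch, 1, 6));
    q.push_back(Adjust(ch, 1, 0));
  }
}

int main() {
  std::deque<char> q;
  Encode(q, 0x00E9);  // U+00E9 LATIN SMALL LETTER E WITH ACUTE
  for (std::deque<char>::iterator it = q.begin(); it != q.end(); ++it)
    std::printf("%02X ", static_cast<unsigned char>(*it));
  std::printf("\n");  // prints "C3 A9"
  return 0;
}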
src/stream.h
View file @
3355bbb3
#ifndef STREAM_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define STREAM_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) || \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include "yaml-cpp/noncopyable.h"
#include "yaml-cpp/mark.h"
#include <cstddef>
...
@@ -15,65 +16,67 @@
#include <set>
#include <string>

namespace YAML {
class Stream : private noncopyable {
 public:
  friend class StreamCharSource;

  Stream(std::istream& input);
  ~Stream();

  operator bool() const;
  bool operator!() const { return !static_cast<bool>(*this); }

  char peek() const;
  char get();
  std::string get(int n);
  void eat(int n = 1);

  static char eof() { return 0x04; }

  const Mark mark() const { return m_mark; }
  int pos() const { return m_mark.pos; }
  int line() const { return m_mark.line; }
  int column() const { return m_mark.column; }
  void ResetColumn() { m_mark.column = 0; }

 private:
  enum CharacterSet { utf8, utf16le, utf16be, utf32le, utf32be };

  std::istream& m_input;
  Mark m_mark;

  CharacterSet m_charSet;
  mutable std::deque<char> m_readahead;
  unsigned char* const m_pPrefetched;
  mutable size_t m_nPrefetchedAvailable;
  mutable size_t m_nPrefetchedUsed;

  void AdvanceCurrent();
  char CharAt(size_t i) const;
  bool ReadAheadTo(size_t i) const;
  bool _ReadAheadTo(size_t i) const;
  void StreamInUtf8() const;
  void StreamInUtf16() const;
  void StreamInUtf32() const;
  unsigned char GetNextByte() const;
};

// CharAt
// . Unchecked access
inline char Stream::CharAt(size_t i) const { return m_readahead[i]; }

inline bool Stream::ReadAheadTo(size_t i) const {
  if (m_readahead.size() > i)
    return true;
  return _ReadAheadTo(i);
}
}

#endif  // STREAM_H_62B23520_7C8E_11DE_8A39_0800200C9A66
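A hedged usage sketch of the Stream interface declared above. It assumes the code is compiled inside the yaml-cpp source tree, since stream.h is an internal header rather than part of the installed API:

// Illustrative sketch (not a yaml-cpp example program): drive Stream directly.
// Assumes "stream.h" from src/ is on the include path.
#include <iostream>
#include <sstream>
#include "stream.h"

int main() {
  std::istringstream input("- hello\n- world\n");
  YAML::Stream stream(input);

  // peek()/get() walk the decoded character stream, while mark() tracks
  // pos, line and column for error reporting; operator bool() turns false
  // once only the internal eof marker (0x04) remains.
  while (stream) {
    char ch = stream.get();
    std::cout << ch;
  }
  std::cout << "stopped at line " << stream.mark().line << "\n";
  return 0;
}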
src/streamcharsource.h
View file @
3355bbb3
#ifndef STREAMCHARSOURCE_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define STREAMCHARSOURCE_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) || \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include "yaml-cpp/noncopyable.h"
#include <cstddef>

namespace YAML {
class StreamCharSource {
 public:
  StreamCharSource(const Stream& stream) : m_offset(0), m_stream(stream) {}
  StreamCharSource(const StreamCharSource& source)
      : m_offset(source.m_offset), m_stream(source.m_stream) {}
  ~StreamCharSource() {}

  operator bool() const;
  char operator[](std::size_t i) const { return m_stream.CharAt(m_offset + i); }
  bool operator!() const { return !static_cast<bool>(*this); }

  const StreamCharSource operator+(int i) const;

 private:
  std::size_t m_offset;
  const Stream& m_stream;

  StreamCharSource& operator=(const StreamCharSource&);  // non-assignable
};

inline StreamCharSource::operator bool() const {
  return m_stream.ReadAheadTo(m_offset);
}

inline const StreamCharSource StreamCharSource::operator+(int i) const {
  StreamCharSource source(*this);
  if (static_cast<int>(source.m_offset) + i >= 0)
    source.m_offset += i;
  else
    source.m_offset = 0;
  return source;
}
}

#endif  // STREAMCHARSOURCE_H_62B23520_7C8E_11DE_8A39_0800200C9A66
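One detail worth noting in operator+ above: advancing by a negative amount clamps the unsigned m_offset at zero instead of letting it wrap. A tiny standalone check of that arithmetic, written as a free function purely for illustration (it is not part of the commit):

#include <cassert>
#include <cstddef>

// Same clamping rule as StreamCharSource::operator+ above.
std::size_t AdvanceOffset(std::size_t offset, int i) {
  if (static_cast<int>(offset) + i >= 0)
    return offset + i;
  return 0;
}

int main() {
  assert(AdvanceOffset(5, 3) == 8);
  assert(AdvanceOffset(5, -2) == 3);
  assert(AdvanceOffset(1, -4) == 0);  // would underflow; clamped to zero
}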
src/stringsource.h
View file @
3355bbb3
#ifndef STRINGSOURCE_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define STRINGSOURCE_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) || \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include <cstddef>

namespace YAML {
class StringCharSource {
 public:
  StringCharSource(const char* str, std::size_t size)
      : m_str(str), m_size(size), m_offset(0) {}

  operator bool() const { return m_offset < m_size; }
  char operator[](std::size_t i) const { return m_str[m_offset + i]; }
  bool operator!() const { return !static_cast<bool>(*this); }

  const StringCharSource operator+(int i) const {
    StringCharSource source(*this);
    if (static_cast<int>(source.m_offset) + i >= 0)
      source.m_offset += i;
    else
      source.m_offset = 0;
    return source;
  }

  StringCharSource& operator++() {
    ++m_offset;
    return *this;
  }

  StringCharSource& operator+=(std::size_t offset) {
    m_offset += offset;
    return *this;
  }

 private:
  const char* m_str;
  std::size_t m_size;
  std::size_t m_offset;
};
}

#endif  // STRINGSOURCE_H_62B23520_7C8E_11DE_8A39_0800200C9A66
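StringCharSource only depends on <cstddef>, so a small usage sketch compiles against the header exactly as shown above. The surrounding main() below is illustrative, not part of the commit:

#include <cassert>
#include <string>
#include "stringsource.h"  // the header above

int main() {
  const std::string text = "abc";
  YAML::StringCharSource source(text.c_str(), text.size());

  assert(source);            // offset 0 is still inside the buffer
  assert(source[0] == 'a');  // random access relative to the current offset
  ++source;                  // advance by one character
  assert(source[0] == 'b');

  const YAML::StringCharSource ahead = source + 1;  // cheap copy with a new offset
  assert(ahead[0] == 'c');

  source += 2;               // move past the end...
  assert(!source);           // ...and the source reports exhaustion
}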
src/tag.cpp
View file @
3355bbb3
... @@ -4,49 +4,45 @@

#include <cassert>
#include <stdexcept>

namespace YAML {
Tag::Tag(const Token& token) : type(static_cast<TYPE>(token.data)) {
  switch (type) {
    case VERBATIM:
      value = token.value;
      break;
    case PRIMARY_HANDLE:
      value = token.value;
      break;
    case SECONDARY_HANDLE:
      value = token.value;
      break;
    case NAMED_HANDLE:
      handle = token.value;
      value = token.params[0];
      break;
    case NON_SPECIFIC:
      break;
    default:
      assert(false);
  }
}

const std::string Tag::Translate(const Directives& directives) {
  switch (type) {
    case VERBATIM:
      return value;
    case PRIMARY_HANDLE:
      return directives.TranslateTagHandle("!") + value;
    case SECONDARY_HANDLE:
      return directives.TranslateTagHandle("!!") + value;
    case NAMED_HANDLE:
      return directives.TranslateTagHandle("!" + handle + "!") + value;
    case NON_SPECIFIC:
      // TODO:
      return "!";
    default:
      assert(false);
  }
  throw std::runtime_error("yaml-cpp: internal error, bad tag type");
}
}
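The Translate() switch above reduces every tag form to "translate the handle, then append the suffix". A rough standalone sketch of that composition, with a hard-coded table standing in for the real Directives class (the function name and defaults here are illustrative only; the secondary-handle prefix is the YAML-standard "tag:yaml.org,2002:" that also appears in the tests below):

#include <cassert>
#include <map>
#include <string>

// Illustrative stand-in for Directives::TranslateTagHandle.
std::string TranslateTagHandle(const std::string& handle) {
  std::map<std::string, std::string> handles;
  handles["!"] = "!";                    // primary handle stays local
  handles["!!"] = "tag:yaml.org,2002:";  // secondary handle, YAML default
  std::map<std::string, std::string>::const_iterator it = handles.find(handle);
  return it != handles.end() ? it->second : handle;
}

int main() {
  // Secondary handle: "!!str" resolves to the full YAML tag URI.
  assert(TranslateTagHandle("!!") + "str" == "tag:yaml.org,2002:str");
  // Primary handle: "!foo" remains a local tag.
  assert(TranslateTagHandle("!") + "foo" == "!foo");
}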
src/tag.h
View file @
3355bbb3
#ifndef TAG_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define TAG_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) || \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include <string>

namespace YAML {
struct Token;
struct Directives;

struct Tag {
  enum TYPE {
    VERBATIM,
    PRIMARY_HANDLE,
    SECONDARY_HANDLE,
    NAMED_HANDLE,
    NON_SPECIFIC
  };

  Tag(const Token& token);
  const std::string Translate(const Directives& directives);

  TYPE type;
  std::string handle, value;
};
}

#endif  // TAG_H_62B23520_7C8E_11DE_8A39_0800200C9A66
src/token.h
View file @
3355bbb3
#ifndef TOKEN_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define TOKEN_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) || \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include "yaml-cpp/mark.h"
#include <iostream>
#include <string>
#include <vector>

namespace YAML {
const std::string TokenNames[] = {
    "DIRECTIVE",        "DOC_START",      "DOC_END",      "BLOCK_SEQ_START",
    "BLOCK_MAP_START",  "BLOCK_SEQ_END",  "BLOCK_MAP_END", "BLOCK_ENTRY",
    "FLOW_SEQ_START",   "FLOW_MAP_START", "FLOW_SEQ_END",  "FLOW_MAP_END",
    "FLOW_MAP_COMPACT", "FLOW_ENTRY",     "KEY",           "VALUE",
    "ANCHOR",           "ALIAS",          "TAG",           "SCALAR"};

struct Token {
  // enums
  enum STATUS { VALID, INVALID, UNVERIFIED };
  enum TYPE {
    DIRECTIVE,
    DOC_START,
    DOC_END,
    BLOCK_SEQ_START,
    BLOCK_MAP_START,
    BLOCK_SEQ_END,
    BLOCK_MAP_END,
    BLOCK_ENTRY,
    FLOW_SEQ_START,
    FLOW_MAP_START,
    FLOW_SEQ_END,
    FLOW_MAP_END,
    FLOW_MAP_COMPACT,
    FLOW_ENTRY,
    KEY,
    VALUE,
    ANCHOR,
    ALIAS,
    TAG,
    PLAIN_SCALAR,
    NON_PLAIN_SCALAR
  };

  // data
  Token(TYPE type_, const Mark& mark_)
      : status(VALID), type(type_), mark(mark_), data(0) {}

  friend std::ostream& operator<<(std::ostream& out, const Token& token) {
    out << TokenNames[token.type] << std::string(": ") << token.value;
    for (std::size_t i = 0; i < token.params.size(); i++)
      out << std::string(" ") << token.params[i];
    return out;
  }

  STATUS status;
  TYPE type;
  Mark mark;
  std::string value;
  std::vector<std::string> params;
  int data;
};
}

#endif  // TOKEN_H_62B23520_7C8E_11DE_8A39_0800200C9A66
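Token pairs its TYPE enum with the parallel TokenNames table so operator<< can print a readable name for each token; the table and the enum must stay in the same order for the lookup to be meaningful. A minimal standalone sketch of the same table-plus-streaming pattern (the struct here is purely illustrative, not the yaml-cpp Token):

#include <iostream>
#include <string>

// Names listed in the same order as the enum below.
const std::string ColorNames[] = {"RED", "GREEN", "BLUE"};

struct Color {
  enum TYPE { RED, GREEN, BLUE };

  Color(TYPE type_, const std::string& value_) : type(type_), value(value_) {}

  // Stream the name from the parallel table, then the payload.
  friend std::ostream& operator<<(std::ostream& out, const Color& c) {
    return out << ColorNames[c.type] << ": " << c.value;
  }

  TYPE type;
  std::string value;
};

int main() {
  std::cout << Color(Color::GREEN, "#00ff00") << "\n";  // prints "GREEN: #00ff00"
}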
test/core/parsertests.cpp
View file @
3355bbb3
... @@ -3,75 +3,74 @@

#include "yaml-cpp/yaml.h"
#include <iostream>

namespace Test {
namespace Parser {
TEST NoEndOfMapFlow() {
  try {
    HANDLE("---{header: {id: 1");
  } catch (const YAML::ParserException& e) {
    YAML_ASSERT(e.msg == std::string(YAML::ErrorMsg::END_OF_MAP_FLOW));
    return true;
  }
  return " no exception caught";
}

TEST PlainScalarStartingWithQuestionMark() {
  HANDLE("foo: ?bar");
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "foo");
  EXPECT_SCALAR("?", 0, "?bar");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

TEST NullStringScalar() {
  HANDLE("foo: null");
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "foo");
  EXPECT_NULL(0);
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}
}

namespace {
void RunParserTest(TEST (*test)(), const std::string& name, int& passed,
                   int& total) {
  TEST ret;
  try {
    ret = test();
  } catch (const YAML::Exception& e) {
    ret.ok = false;
    ret.error = std::string(" Exception caught: ") + e.what();
  }
  if (!ret.ok) {
    std::cout << "Parser test failed: " << name << "\n";
    std::cout << ret.error << "\n";
  }
  if (ret.ok)
    passed++;
  total++;
}
}

bool RunParserTests() {
  int passed = 0;
  int total = 0;
  RunParserTest(&Parser::NoEndOfMapFlow, "No end of map flow", passed, total);
  RunParserTest(&Parser::PlainScalarStartingWithQuestionMark,
                "Plain scalar starting with question mark", passed, total);
  RunParserTest(&Parser::NullStringScalar, "Null string scalar", passed, total);

  std::cout << "Parser tests: " << passed << "/" << total << " passed\n";
  return passed == total;
}
}
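RunParserTests above follows a simple harness pattern: each test is a plain function pointer, the runner catches exceptions, and the pass/total counters are threaded through by reference. A stripped-down sketch of the same pattern, standalone and independent of the project's TEST/HANDLE machinery (names here are illustrative):

#include <exception>
#include <iostream>
#include <string>

typedef bool (*TestFn)();

void RunOne(TestFn test, const std::string& name, int& passed, int& total) {
  bool ok = false;
  try {
    ok = test();
  } catch (const std::exception& e) {
    std::cout << name << " threw: " << e.what() << "\n";
  }
  if (ok)
    passed++;
  else
    std::cout << "Test failed: " << name << "\n";
  total++;
}

bool AlwaysPasses() { return true; }

int main() {
  int passed = 0, total = 0;
  RunOne(&AlwaysPasses, "Always passes", passed, total);
  std::cout << passed << "/" << total << " passed\n";
  return passed == total ? 0 : 1;
}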
test/core/spectests.cpp
View file @
3355bbb3
... @@ -5,1935 +5,1854 @@

#include <cassert>

namespace Test {
namespace Spec {
// 2.1
TEST SeqScalars() {
  HANDLE(ex2_1);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "Ken Griffey");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.2
TEST MappingScalarsToScalars() {
  HANDLE(ex2_2);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "65");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.278");
  EXPECT_SCALAR("?", 0, "rbi");
  EXPECT_SCALAR("?", 0, "147");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.3
TEST MappingScalarsToSequences() {
  HANDLE(ex2_3);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "american");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Boston Red Sox");
  EXPECT_SCALAR("?", 0, "Detroit Tigers");
  EXPECT_SCALAR("?", 0, "New York Yankees");
  EXPECT_SEQ_END();
  EXPECT_SCALAR("?", 0, "national");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "New York Mets");
  EXPECT_SCALAR("?", 0, "Chicago Cubs");
  EXPECT_SCALAR("?", 0, "Atlanta Braves");
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.4
TEST SequenceOfMappings() {
  HANDLE(ex2_4);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "name");
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "65");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.278");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "name");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "63");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.288");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.5
TEST SequenceOfSequences() {
  HANDLE(ex2_5);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "name");
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SEQ_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "65");
  EXPECT_SCALAR("?", 0, "0.278");
  EXPECT_SEQ_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "63");
  EXPECT_SCALAR("?", 0, "0.288");
  EXPECT_SEQ_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.6
TEST MappingOfMappings() {
  HANDLE(ex2_6);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "65");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.278");
  EXPECT_MAP_END();
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "63");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.288");
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.7
TEST TwoDocumentsInAStream() {
  HANDLE(ex2_7);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "Ken Griffey");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Chicago Cubs");
  EXPECT_SCALAR("?", 0, "St Louis Cardinals");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.8
TEST PlayByPlayFeed() {
  HANDLE(ex2_8);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "time");
  EXPECT_SCALAR("?", 0, "20:03:20");
  EXPECT_SCALAR("?", 0, "player");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "action");
  EXPECT_SCALAR("?", 0, "strike (miss)");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "time");
  EXPECT_SCALAR("?", 0, "20:03:47");
  EXPECT_SCALAR("?", 0, "player");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "action");
  EXPECT_SCALAR("?", 0, "grand slam");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.9
TEST SingleDocumentWithTwoComments() {
  HANDLE(ex2_9);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SEQ_END();
  EXPECT_SCALAR("?", 0, "rbi");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "Ken Griffey");
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.10
TEST SimpleAnchor() {
  HANDLE(ex2_10);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 1, "Sammy Sosa");
  EXPECT_SEQ_END();
  EXPECT_SCALAR("?", 0, "rbi");
  EXPECT_SEQ_START("?", 0);
  EXPECT_ALIAS(1);
  EXPECT_SCALAR("?", 0, "Ken Griffey");
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.11
TEST MappingBetweenSequences() {
  HANDLE(ex2_11);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Detroit Tigers");
  EXPECT_SCALAR("?", 0, "Chicago cubs");
  EXPECT_SEQ_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "2001-07-23");
  EXPECT_SEQ_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "New York Yankees");
  EXPECT_SCALAR("?", 0, "Atlanta Braves");
  EXPECT_SEQ_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "2001-07-02");
  EXPECT_SCALAR("?", 0, "2001-08-12");
  EXPECT_SCALAR("?", 0, "2001-08-14");
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.12
TEST CompactNestedMapping() {
  HANDLE(ex2_12);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "item");
  EXPECT_SCALAR("?", 0, "Super Hoop");
  EXPECT_SCALAR("?", 0, "quantity");
  EXPECT_SCALAR("?", 0, "1");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "item");
  EXPECT_SCALAR("?", 0, "Basketball");
  EXPECT_SCALAR("?", 0, "quantity");
  EXPECT_SCALAR("?", 0, "4");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "item");
  EXPECT_SCALAR("?", 0, "Big Shoes");
  EXPECT_SCALAR("?", 0, "quantity");
  EXPECT_SCALAR("?", 0, "1");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.13
TEST InLiteralsNewlinesArePreserved() {
  HANDLE(ex2_13);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0,
                "\\//||\\/||\n"
                "// ||  ||__");
  EXPECT_DOC_END();
  DONE();
}

// 2.14
TEST InFoldedScalarsNewlinesBecomeSpaces() {
  HANDLE(ex2_14);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "Mark McGwire's year was crippled by a knee injury.");
  EXPECT_DOC_END();
  DONE();
}

// 2.15
TEST FoldedNewlinesArePreservedForMoreIndentedAndBlankLines() {
  HANDLE(ex2_15);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0,
                "Sammy Sosa completed another fine season with great stats.\n"
                "\n"
                "  63 Home Runs\n"
                "  0.288 Batting Average\n"
                "\n"
                "What a year!");
  EXPECT_DOC_END();
  DONE();
}

// 2.16
TEST IndentationDeterminesScope() {
  HANDLE(ex2_16);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "name");
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "accomplishment");
  EXPECT_SCALAR("!", 0, "Mark set a major league home run record in 1998.\n");
  EXPECT_SCALAR("?", 0, "stats");
  EXPECT_SCALAR("!", 0,
                "65 Home Runs\n"
                "0.278 Batting Average\n");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.17
TEST QuotedScalars() {
  HANDLE(ex2_17);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "unicode");
  EXPECT_SCALAR("!", 0, "Sosa did fine.\u263A");
  EXPECT_SCALAR("?", 0, "control");
  EXPECT_SCALAR("!", 0, "\b1998\t1999\t2000\n");
  EXPECT_SCALAR("?", 0, "hex esc");
  EXPECT_SCALAR("!", 0, "\x0d\x0a is \r\n");
  EXPECT_SCALAR("?", 0, "single");
  EXPECT_SCALAR("!", 0, "\"Howdy!\" he cried.");
  EXPECT_SCALAR("?", 0, "quoted");
  EXPECT_SCALAR("!", 0, " # Not a 'comment'.");
  EXPECT_SCALAR("?", 0, "tie-fighter");
  EXPECT_SCALAR("!", 0, "|\\-*-/|");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.18
TEST MultiLineFlowScalars() {
  HANDLE(ex2_18);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "plain");
  EXPECT_SCALAR("?", 0, "This unquoted scalar spans many lines.");
  EXPECT_SCALAR("?", 0, "quoted");
  EXPECT_SCALAR("!", 0, "So does this quoted scalar.\n");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// TODO: 2.19 - 2.22 schema tags

// 2.23
TEST VariousExplicitTags() {
  HANDLE(ex2_23);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "not-date");
  EXPECT_SCALAR("tag:yaml.org,2002:str", 0, "2002-04-28");
  EXPECT_SCALAR("?", 0, "picture");
  EXPECT_SCALAR("tag:yaml.org,2002:binary", 0,
                "R0lGODlhDAAMAIQAAP//9/X\n"
                "17unp5WZmZgAAAOfn515eXv\n"
                "Pz7Y6OjuDg4J+fn5OTk6enp\n"
                "56enmleECcgggoBADs=\n");
  EXPECT_SCALAR("?", 0, "application specific tag");
  EXPECT_SCALAR("!something", 0,
                "The semantics of the tag\n"
                "above may be different for\n"
                "different documents.");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.24
TEST GlobalTags() {
  HANDLE(ex2_24);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("tag:clarkevans.com,2002:shape", 0);
  EXPECT_MAP_START("tag:clarkevans.com,2002:circle", 0);
  EXPECT_SCALAR("?", 0, "center");
  EXPECT_MAP_START("?", 1);
  EXPECT_SCALAR("?", 0, "x");
  EXPECT_SCALAR("?", 0, "73");
  EXPECT_SCALAR("?", 0, "y");
  EXPECT_SCALAR("?", 0, "129");
  EXPECT_MAP_END();
  EXPECT_SCALAR("?", 0, "radius");
  EXPECT_SCALAR("?", 0, "7");
  EXPECT_MAP_END();
  EXPECT_MAP_START("tag:clarkevans.com,2002:line", 0);
  EXPECT_SCALAR("?", 0, "start");
  EXPECT_ALIAS(1);
  EXPECT_SCALAR("?", 0, "finish");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "x");
  EXPECT_SCALAR("?", 0, "89");
  EXPECT_SCALAR("?", 0, "y");
  EXPECT_SCALAR("?", 0, "102");
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_MAP_START("tag:clarkevans.com,2002:label", 0);
  EXPECT_SCALAR("?", 0, "start");
  EXPECT_ALIAS(1);
  EXPECT_SCALAR("?", 0, "color");
  EXPECT_SCALAR("?", 0, "0xFFEEBB");
  EXPECT_SCALAR("?", 0, "text");
  EXPECT_SCALAR("?", 0, "Pretty vector drawing.");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.25
TEST UnorderedSets() {
  HANDLE(ex2_25);
  EXPECT_DOC_START();
  EXPECT_MAP_START("tag:yaml.org,2002:set", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_NULL(0);
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_NULL(0);
  EXPECT_SCALAR("?", 0, "Ken Griffey");
  EXPECT_NULL(0);
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.26
TEST OrderedMappings() {
  HANDLE(ex2_26);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("tag:yaml.org,2002:omap", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "65");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "63");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Ken Griffey");
  EXPECT_SCALAR("?", 0, "58");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.27
TEST Invoice() {
  HANDLE(ex2_27);
  EXPECT_DOC_START();
  EXPECT_MAP_START("tag:clarkevans.com,2002:invoice", 0);
  EXPECT_SCALAR("?", 0, "invoice");
  EXPECT_SCALAR("?", 0, "34843");
  EXPECT_SCALAR("?", 0, "date");
  EXPECT_SCALAR("?", 0, "2001-01-23");
  EXPECT_SCALAR("?", 0, "bill-to");
  EXPECT_MAP_START("?", 1);
  EXPECT_SCALAR("?", 0, "given");
  EXPECT_SCALAR("?", 0, "Chris");
  EXPECT_SCALAR("?", 0, "family");
  EXPECT_SCALAR("?", 0, "Dumars");
  EXPECT_SCALAR("?", 0, "address");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "lines");
  EXPECT_SCALAR("!", 0,
                "458 Walkman Dr.\n"
                "Suite #292\n");
  EXPECT_SCALAR("?", 0, "city");
  EXPECT_SCALAR("?", 0, "Royal Oak");
  EXPECT_SCALAR("?", 0, "state");
  EXPECT_SCALAR("?", 0, "MI");
  EXPECT_SCALAR("?", 0, "postal");
  EXPECT_SCALAR("?", 0, "48046");
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_SCALAR("?", 0, "ship-to");
  EXPECT_ALIAS(1);
  EXPECT_SCALAR("?", 0, "product");
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "sku");
  EXPECT_SCALAR("?", 0, "BL394D");
  EXPECT_SCALAR("?", 0, "quantity");
  EXPECT_SCALAR("?", 0, "4");
  EXPECT_SCALAR("?", 0, "description");
  EXPECT_SCALAR("?", 0, "Basketball");
  EXPECT_SCALAR("?", 0, "price");
  EXPECT_SCALAR("?", 0, "450.00");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "sku");
  EXPECT_SCALAR("?", 0, "BL4438H");
  EXPECT_SCALAR("?", 0, "quantity");
  EXPECT_SCALAR("?", 0, "1");
  EXPECT_SCALAR("?", 0, "description");
  EXPECT_SCALAR("?", 0, "Super Hoop");
  EXPECT_SCALAR("?", 0, "price");
  EXPECT_SCALAR("?", 0, "2392.00");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_SCALAR("?", 0, "tax");
  EXPECT_SCALAR("?", 0, "251.42");
  EXPECT_SCALAR("?", 0, "total");
  EXPECT_SCALAR("?", 0, "4443.52");
  EXPECT_SCALAR("?", 0, "comments");
  EXPECT_SCALAR(
      "?", 0,
      "Late afternoon is best. Backup contact is Nancy Billsmer @ 338-4338.");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.28
TEST LogFile() {
  HANDLE(ex2_28);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Time");
  EXPECT_SCALAR("?", 0, "2001-11-23 15:01:42 -5");
  EXPECT_SCALAR("?", 0, "User");
  EXPECT_SCALAR("?", 0, "ed");
  EXPECT_SCALAR("?", 0, "Warning");
  EXPECT_SCALAR("?", 0, "This is an error message for the log file");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Time");
  EXPECT_SCALAR("?", 0, "2001-11-23 15:02:31 -5");
  EXPECT_SCALAR("?", 0, "User");
  EXPECT_SCALAR("?", 0, "ed");
  EXPECT_SCALAR("?", 0, "Warning");
  EXPECT_SCALAR("?", 0, "A slightly different error message.");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Date");
  EXPECT_SCALAR("?", 0, "2001-11-23 15:03:17 -5");
  EXPECT_SCALAR("?", 0, "User");
  EXPECT_SCALAR("?", 0, "ed");
  EXPECT_SCALAR("?", 0, "Fatal");
  EXPECT_SCALAR("?", 0, "Unknown variable \"bar\"");
  EXPECT_SCALAR("?", 0, "Stack");
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "file");
  EXPECT_SCALAR("?", 0, "TopClass.py");
  EXPECT_SCALAR("?", 0, "line");
  EXPECT_SCALAR("?", 0, "23");
  EXPECT_SCALAR("?", 0, "code");
  EXPECT_SCALAR("!", 0, "x = MoreObject(\"345\\n\")\n");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "file");
  EXPECT_SCALAR("?", 0, "MoreClass.py");
  EXPECT_SCALAR("?", 0, "line");
  EXPECT_SCALAR("?", 0, "58");
  EXPECT_SCALAR("?", 0, "code");
  EXPECT_SCALAR("!", 0, "foo = bar");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// TODO: 5.1 - 5.2 BOM

// 5.3
TEST BlockStructureIndicators() {
  HANDLE(ex5_3);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "sequence");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "one");
  EXPECT_SCALAR("?", 0, "two");
  EXPECT_SEQ_END();
  EXPECT_SCALAR("?", 0, "mapping");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "sky");
  EXPECT_SCALAR("?", 0, "blue");
  EXPECT_SCALAR("?", 0, "sea");
  EXPECT_SCALAR("?", 0, "green");
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 5.4
TEST FlowStructureIndicators() {
  HANDLE(ex5_4);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "sequence");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "one");
  EXPECT_SCALAR("?", 0, "two");
  EXPECT_SEQ_END();
  EXPECT_SCALAR("?", 0, "mapping");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "sky");
  EXPECT_SCALAR("?", 0, "blue");
  EXPECT_SCALAR("?", 0, "sea");
  EXPECT_SCALAR("?", 0, "green");
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 5.5
TEST CommentIndicator() {
  HANDLE(ex5_5);
  DONE();
}

// 5.6
TEST NodePropertyIndicators() {
  HANDLE(ex5_6);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "anchored");
  EXPECT_SCALAR("!local", 1, "value");
  EXPECT_SCALAR("?", 0, "alias");
  EXPECT_ALIAS(1);
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 5.7
TEST BlockScalarIndicators() {
  HANDLE(ex5_7);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "literal");
  EXPECT_SCALAR("!", 0, "some\ntext\n");
  EXPECT_SCALAR("?", 0, "folded");
  EXPECT_SCALAR("!", 0, "some text\n");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 5.8
TEST QuotedScalarIndicators() {
  HANDLE(ex5_8);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "single");
  EXPECT_SCALAR("!", 0, "text");
  EXPECT_SCALAR("?", 0, "double");
  EXPECT_SCALAR("!", 0, "text");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// TODO: 5.9 directive
// TODO: 5.10 reserved indicator

// 5.11
TEST LineBreakCharacters() {
  HANDLE(ex5_11);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0,
                "Line break (no glyph)\n"
                "Line break (glyphed)\n");
  EXPECT_DOC_END();
  DONE();
}

// 5.12
TEST TabsAndSpaces() {
  HANDLE(ex5_12);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "quoted");
  EXPECT_SCALAR("!", 0, "Quoted\t");
  EXPECT_SCALAR("?", 0, "block");
  EXPECT_SCALAR("!", 0,
                "void main() {\n"
                "\tprintf(\"Hello, world!\\n\");\n"
                "}");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 5.13
TEST EscapedCharacters() {
  HANDLE(ex5_13);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0,
                "Fun with \x5C \x22 \x07 \x08 \x1B \x0C \x0A \x0D \x09 \x0B " +
                    std::string("\x00", 1) +
                    " \x20 \xA0 \x85 \xe2\x80\xa8 \xe2\x80\xa9 A A A");
  EXPECT_DOC_END();
  DONE();
}

// 5.14
TEST InvalidEscapedCharacters() {
  try {
    HANDLE(ex5_14);
  } catch (const YAML::ParserException& e) {
    YAML_ASSERT(e.msg == std::string(YAML::ErrorMsg::INVALID_ESCAPE) + "c");
    return true;
  }
  return " no exception caught";
}

// 6.1
TEST IndentationSpaces() {
  HANDLE(ex6_1);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Not indented");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "By one space");
  EXPECT_SCALAR("!", 0, "By four\n  spaces\n");
  EXPECT_SCALAR("?", 0, "Flow style");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "By two");
  EXPECT_SCALAR("?", 0, "Also by two");
  EXPECT_SCALAR("?", 0, "Still by two");
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.2
TEST IndentationIndicators() {
  HANDLE(ex6_2);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "a");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "b");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "c");
  EXPECT_SCALAR("?", 0, "d");
  EXPECT_SEQ_END();
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.3
TEST SeparationSpaces() {
  HANDLE(ex6_3);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "foo");
  EXPECT_SCALAR("?", 0, "bar");
  EXPECT_MAP_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "baz");
  EXPECT_SCALAR("?", 0, "baz");
  EXPECT_SEQ_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.4
TEST LinePrefixes() {
  HANDLE(ex6_4);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "plain");
  EXPECT_SCALAR("?", 0, "text lines");
  EXPECT_SCALAR("?", 0, "quoted");
  EXPECT_SCALAR("!", 0, "text lines");
  EXPECT_SCALAR("?", 0, "block");
  EXPECT_SCALAR("!", 0, "text\n \tlines\n");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.5
TEST EmptyLines() {
  HANDLE(ex6_5);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Folding");
  EXPECT_SCALAR("!", 0, "Empty line\nas a line feed");
  EXPECT_SCALAR("?", 0, "Chomping");
  EXPECT_SCALAR("!", 0, "Clipped empty lines\n");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.6
TEST LineFolding() {
  HANDLE(ex6_6);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "trimmed\n\n\nas space");
  EXPECT_DOC_END();
  DONE();
}

// 6.7
TEST BlockFolding() {
  HANDLE(ex6_7);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "foo \n\n\t bar\n\nbaz\n");
  EXPECT_DOC_END();
  DONE();
}

// 6.8
TEST FlowFolding() {
  HANDLE(ex6_8);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, " foo\nbar\nbaz ");
  EXPECT_DOC_END();
  DONE();
}

// 6.9
TEST SeparatedComment() {
  HANDLE(ex6_9);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "key");
  EXPECT_SCALAR("?", 0, "value");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.10
TEST CommentLines() {
  HANDLE(ex6_10);
  DONE();
}

// 6.11
TEST MultiLineComments() {
  HANDLE(ex6_11);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "key");
  EXPECT_SCALAR("?", 0, "value");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.12
TEST SeparationSpacesII() {
  HANDLE(ex6_12);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "first");
  EXPECT_SCALAR("?", 0, "Sammy");
  EXPECT_SCALAR("?", 0, "last");
  EXPECT_SCALAR("?", 0, "Sosa");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "65");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.278");
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.13
TEST ReservedDirectives() {
  HANDLE(ex6_13);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "foo");
  EXPECT_DOC_END();
  DONE();
}

// 6.14
TEST YAMLDirective() {
  HANDLE(ex6_14);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "foo");
  EXPECT_DOC_END();
  DONE();
}

// 6.15
TEST InvalidRepeatedYAMLDirective() {
  try {
    HANDLE(ex6_15);
  } catch (const YAML::ParserException& e) {
    if (e.msg == YAML::ErrorMsg::REPEATED_YAML_DIRECTIVE)
      return true;
    throw;
  }
  return " No exception was thrown";
}

// 6.16
TEST TagDirective() {
  HANDLE(ex6_16);
  EXPECT_DOC_START();
  EXPECT_SCALAR("tag:yaml.org,2002:str", 0, "foo");
  EXPECT_DOC_END();
  DONE();
}

// 6.17
TEST InvalidRepeatedTagDirective() {
  try {
    HANDLE(ex6_17);
  } catch (const YAML::ParserException& e) {
    if (e.msg == YAML::ErrorMsg::REPEATED_TAG_DIRECTIVE)
      return true;
    throw;
  }
  return " No exception was thrown";
}

// 6.18
TEST PrimaryTagHandle() {
  HANDLE(ex6_18);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!foo", 0, "bar");
  EXPECT_DOC_END
();
try
{
EXPECT_DOC_START
();
HANDLE
(
ex6_15
);
EXPECT_SCALAR
(
"tag:example.com,2000:app/foo"
,
0
,
"bar"
);
}
catch
(
const
YAML
::
ParserException
&
e
)
{
EXPECT_DOC_END
();
if
(
e
.
msg
==
YAML
::
ErrorMsg
::
REPEATED_YAML_DIRECTIVE
)
DONE
();
return
true
;
}
throw
;
// 6.19
}
TEST
SecondaryTagHandle
()
{
HANDLE
(
ex6_19
);
return
" No exception was thrown"
;
EXPECT_DOC_START
();
}
EXPECT_SCALAR
(
"tag:example.com,2000:app/int"
,
0
,
"1 - 3"
);
EXPECT_DOC_END
();
// 6.16
DONE
();
TEST
TagDirective
()
}
{
HANDLE
(
ex6_16
);
// 6.20
EXPECT_DOC_START
();
TEST
TagHandles
()
{
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
"foo"
);
HANDLE
(
ex6_20
);
EXPECT_DOC_END
();
EXPECT_DOC_START
();
DONE
();
EXPECT_SCALAR
(
"tag:example.com,2000:app/foo"
,
0
,
"bar"
);
}
EXPECT_DOC_END
();
DONE
();
// 6.17
}
TEST
InvalidRepeatedTagDirective
()
{
// 6.21
try
{
TEST
LocalTagPrefix
()
{
HANDLE
(
ex6_17
);
HANDLE
(
ex6_21
);
}
catch
(
const
YAML
::
ParserException
&
e
)
{
EXPECT_DOC_START
();
if
(
e
.
msg
==
YAML
::
ErrorMsg
::
REPEATED_TAG_DIRECTIVE
)
EXPECT_SCALAR
(
"!my-light"
,
0
,
"fluorescent"
);
return
true
;
EXPECT_DOC_END
();
EXPECT_DOC_START
();
throw
;
EXPECT_SCALAR
(
"!my-light"
,
0
,
"green"
);
}
EXPECT_DOC_END
();
DONE
();
return
" No exception was thrown"
;
}
}
// 6.22
// 6.18
TEST
GlobalTagPrefix
()
{
TEST
PrimaryTagHandle
()
HANDLE
(
ex6_22
);
{
EXPECT_DOC_START
();
HANDLE
(
ex6_18
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"tag:example.com,2000:app/foo"
,
0
,
"bar"
);
EXPECT_SCALAR
(
"!foo"
,
0
,
"bar"
);
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
EXPECT_DOC_END
();
EXPECT_DOC_START
();
DONE
();
EXPECT_SCALAR
(
"tag:example.com,2000:app/foo"
,
0
,
"bar"
);
}
EXPECT_DOC_END
();
DONE
();
// 6.23
}
TEST
NodeProperties
()
{
HANDLE
(
ex6_23
);
// 6.19
EXPECT_DOC_START
();
TEST
SecondaryTagHandle
()
EXPECT_MAP_START
(
"?"
,
0
);
{
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
1
,
"foo"
);
HANDLE
(
ex6_19
);
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
"bar"
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
2
,
"baz"
);
EXPECT_SCALAR
(
"tag:example.com,2000:app/int"
,
0
,
"1 - 3"
);
EXPECT_ALIAS
(
1
);
EXPECT_DOC_END
();
EXPECT_MAP_END
();
DONE
();
EXPECT_DOC_END
();
}
DONE
();
}
// 6.20
TEST
TagHandles
()
// 6.24
{
TEST
VerbatimTags
()
{
HANDLE
(
ex6_20
);
HANDLE
(
ex6_24
);
EXPECT_DOC_START
();
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"tag:example.com,2000:app/foo"
,
0
,
"bar"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
"foo"
);
DONE
();
EXPECT_SCALAR
(
"!bar"
,
0
,
"baz"
);
}
EXPECT_MAP_END
();
EXPECT_DOC_END
();
// 6.21
DONE
();
TEST
LocalTagPrefix
()
}
{
HANDLE
(
ex6_21
);
// 6.25
EXPECT_DOC_START
();
TEST
InvalidVerbatimTags
()
{
EXPECT_SCALAR
(
"!my-light"
,
0
,
"fluorescent"
);
HANDLE
(
ex6_25
);
EXPECT_DOC_END
();
return
" not implemented yet"
;
EXPECT_DOC_START
();
}
EXPECT_SCALAR
(
"!my-light"
,
0
,
"green"
);
EXPECT_DOC_END
();
// 6.26
DONE
();
TEST
TagShorthands
()
{
}
HANDLE
(
ex6_26
);
EXPECT_DOC_START
();
// 6.22
EXPECT_SEQ_START
(
"?"
,
0
);
TEST
GlobalTagPrefix
()
EXPECT_SCALAR
(
"!local"
,
0
,
"foo"
);
{
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
"bar"
);
HANDLE
(
ex6_22
);
EXPECT_SCALAR
(
"tag:example.com,2000:app/tag%21"
,
0
,
"baz"
);
EXPECT_DOC_START
();
EXPECT_SEQ_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"tag:example.com,2000:app/foo"
,
0
,
"bar"
);
DONE
();
EXPECT_SEQ_END
();
}
EXPECT_DOC_END
();
DONE
();
// 6.27
}
TEST
InvalidTagShorthands
()
{
bool
threw
=
false
;
// 6.23
try
{
TEST
NodeProperties
()
HANDLE
(
ex6_27a
);
{
}
HANDLE
(
ex6_23
);
catch
(
const
YAML
::
ParserException
&
e
)
{
EXPECT_DOC_START
();
threw
=
true
;
EXPECT_MAP_START
(
"?"
,
0
);
if
(
e
.
msg
!=
YAML
::
ErrorMsg
::
TAG_WITH_NO_SUFFIX
)
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
1
,
"foo"
);
throw
;
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
"bar"
);
}
EXPECT_SCALAR
(
"?"
,
2
,
"baz"
);
EXPECT_ALIAS
(
1
);
if
(
!
threw
)
EXPECT_MAP_END
();
return
" No exception was thrown for a tag with no suffix"
;
EXPECT_DOC_END
();
DONE
();
HANDLE
(
}
ex6_27b
);
// TODO: should we reject this one (since !h! is not declared)?
return
" not implemented yet"
;
// 6.24
}
TEST
VerbatimTags
()
{
// 6.28
HANDLE
(
ex6_24
);
TEST
NonSpecificTags
()
{
EXPECT_DOC_START
();
HANDLE
(
ex6_28
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
"foo"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!bar"
,
0
,
"baz"
);
EXPECT_SCALAR
(
"!"
,
0
,
"12"
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"12"
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"!"
,
0
,
"12"
);
DONE
();
EXPECT_SEQ_END
();
}
EXPECT_DOC_END
();
DONE
();
// 6.25
}
TEST
InvalidVerbatimTags
()
{
// 6.29
HANDLE
(
ex6_25
);
TEST
NodeAnchors
()
{
return
" not implemented yet"
;
HANDLE
(
ex6_29
);
}
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
// 6.26
EXPECT_SCALAR
(
"?"
,
0
,
"First occurrence"
);
TEST
TagShorthands
()
EXPECT_SCALAR
(
"?"
,
1
,
"Value"
);
{
EXPECT_SCALAR
(
"?"
,
0
,
"Second occurrence"
);
HANDLE
(
ex6_26
);
EXPECT_ALIAS
(
1
);
EXPECT_DOC_START
();
EXPECT_MAP_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"!local"
,
0
,
"foo"
);
DONE
();
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
"bar"
);
}
EXPECT_SCALAR
(
"tag:example.com,2000:app/tag%21"
,
0
,
"baz"
);
EXPECT_SEQ_END
();
// 7.1
EXPECT_DOC_END
();
TEST
AliasNodes
()
{
DONE
();
HANDLE
(
ex7_1
);
}
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
// 6.27
EXPECT_SCALAR
(
"?"
,
0
,
"First occurrence"
);
TEST
InvalidTagShorthands
()
EXPECT_SCALAR
(
"?"
,
1
,
"Foo"
);
{
EXPECT_SCALAR
(
"?"
,
0
,
"Second occurrence"
);
bool
threw
=
false
;
EXPECT_ALIAS
(
1
);
try
{
EXPECT_SCALAR
(
"?"
,
0
,
"Override anchor"
);
HANDLE
(
ex6_27a
);
EXPECT_SCALAR
(
"?"
,
2
,
"Bar"
);
}
catch
(
const
YAML
::
ParserException
&
e
)
{
EXPECT_SCALAR
(
"?"
,
0
,
"Reuse anchor"
);
threw
=
true
;
EXPECT_ALIAS
(
2
);
if
(
e
.
msg
!=
YAML
::
ErrorMsg
::
TAG_WITH_NO_SUFFIX
)
EXPECT_MAP_END
();
throw
;
EXPECT_DOC_END
();
}
DONE
();
}
if
(
!
threw
)
return
" No exception was thrown for a tag with no suffix"
;
// 7.2
TEST
EmptyNodes
()
{
HANDLE
(
ex6_27b
);
// TODO: should we reject this one (since !h! is not declared)?
HANDLE
(
ex7_2
);
return
" not implemented yet"
;
EXPECT_DOC_START
();
}
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"foo"
);
// 6.28
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
""
);
TEST
NonSpecificTags
()
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
""
);
{
EXPECT_SCALAR
(
"?"
,
0
,
"bar"
);
HANDLE
(
ex6_28
);
EXPECT_MAP_END
();
EXPECT_DOC_START
();
EXPECT_DOC_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
DONE
();
EXPECT_SCALAR
(
"!"
,
0
,
"12"
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"12"
);
EXPECT_SCALAR
(
"!"
,
0
,
"12"
);
// 7.3
EXPECT_SEQ_END
();
TEST
CompletelyEmptyNodes
()
{
EXPECT_DOC_END
();
HANDLE
(
ex7_3
);
DONE
();
EXPECT_DOC_START
();
}
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"foo"
);
// 6.29
EXPECT_NULL
(
0
);
TEST
NodeAnchors
()
EXPECT_NULL
(
0
);
{
EXPECT_SCALAR
(
"?"
,
0
,
"bar"
);
HANDLE
(
ex6_29
);
EXPECT_MAP_END
();
EXPECT_DOC_START
();
EXPECT_DOC_END
();
EXPECT_MAP_START
(
"?"
,
0
);
DONE
();
EXPECT_SCALAR
(
"?"
,
0
,
"First occurrence"
);
}
EXPECT_SCALAR
(
"?"
,
1
,
"Value"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Second occurrence"
);
// 7.4
EXPECT_ALIAS
(
1
);
TEST
DoubleQuotedImplicitKeys
()
{
EXPECT_MAP_END
();
HANDLE
(
ex7_4
);
EXPECT_DOC_END
();
EXPECT_DOC_START
();
DONE
();
EXPECT_MAP_START
(
"?"
,
0
);
}
EXPECT_SCALAR
(
"!"
,
0
,
"implicit block key"
);
EXPECT_SEQ_START
(
"?"
,
0
);
// 7.1
EXPECT_MAP_START
(
"?"
,
0
);
TEST
AliasNodes
()
EXPECT_SCALAR
(
"!"
,
0
,
"implicit flow key"
);
{
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
HANDLE
(
ex7_1
);
EXPECT_MAP_END
();
EXPECT_DOC_START
();
EXPECT_SEQ_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"First occurrence"
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"?"
,
1
,
"Foo"
);
DONE
();
EXPECT_SCALAR
(
"?"
,
0
,
"Second occurrence"
);
}
EXPECT_ALIAS
(
1
);
EXPECT_SCALAR
(
"?"
,
0
,
"Override anchor"
);
// 7.5
EXPECT_SCALAR
(
"?"
,
2
,
"Bar"
);
TEST
DoubleQuotedLineBreaks
()
{
EXPECT_SCALAR
(
"?"
,
0
,
"Reuse anchor"
);
HANDLE
(
ex7_5
);
EXPECT_ALIAS
(
2
);
EXPECT_DOC_START
();
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"!"
,
0
,
EXPECT_DOC_END
();
"folded to a space,
\n
to a line feed, or
\t
\t
non-content"
);
DONE
();
EXPECT_DOC_END
();
}
DONE
();
}
// 7.2
TEST
EmptyNodes
()
// 7.6
{
TEST
DoubleQuotedLines
()
{
HANDLE
(
ex7_2
);
HANDLE
(
ex7_6
);
EXPECT_DOC_START
();
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
" 1st non-empty
\n
2nd non-empty 3rd non-empty "
);
EXPECT_SCALAR
(
"?"
,
0
,
"foo"
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
""
);
DONE
();
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
""
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"bar"
);
EXPECT_MAP_END
();
// 7.7
EXPECT_DOC_END
();
TEST
SingleQuotedCharacters
()
{
DONE
();
HANDLE
(
ex7_7
);
}
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"here's to
\"
quotes
\"
"
);
// 7.3
EXPECT_DOC_END
();
TEST
CompletelyEmptyNodes
()
DONE
();
{
}
HANDLE
(
ex7_3
);
EXPECT_DOC_START
();
// 7.8
EXPECT_MAP_START
(
"?"
,
0
);
TEST
SingleQuotedImplicitKeys
()
{
EXPECT_SCALAR
(
"?"
,
0
,
"foo"
);
HANDLE
(
ex7_8
);
EXPECT_NULL
(
0
);
EXPECT_DOC_START
();
EXPECT_NULL
(
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"bar"
);
EXPECT_SCALAR
(
"!"
,
0
,
"implicit block key"
);
EXPECT_MAP_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_DOC_END
();
EXPECT_MAP_START
(
"?"
,
0
);
DONE
();
EXPECT_SCALAR
(
"!"
,
0
,
"implicit flow key"
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
EXPECT_MAP_END
();
// 7.4
EXPECT_SEQ_END
();
TEST
DoubleQuotedImplicitKeys
()
EXPECT_MAP_END
();
{
EXPECT_DOC_END
();
HANDLE
(
ex7_4
);
DONE
();
EXPECT_DOC_START
();
}
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"implicit block key"
);
// 7.9
EXPECT_SEQ_START
(
"?"
,
0
);
TEST
SingleQuotedLines
()
{
EXPECT_MAP_START
(
"?"
,
0
);
HANDLE
(
ex7_9
);
EXPECT_SCALAR
(
"!"
,
0
,
"implicit flow key"
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
EXPECT_SCALAR
(
"!"
,
0
,
" 1st non-empty
\n
2nd non-empty 3rd non-empty "
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
EXPECT_SEQ_END
();
DONE
();
EXPECT_MAP_END
();
}
EXPECT_DOC_END
();
DONE
();
// 7.10
}
TEST
PlainCharacters
()
{
HANDLE
(
ex7_10
);
// 7.5
EXPECT_DOC_START
();
TEST
DoubleQuotedLineBreaks
()
EXPECT_SEQ_START
(
"?"
,
0
);
{
EXPECT_SCALAR
(
"?"
,
0
,
"::vector"
);
HANDLE
(
ex7_5
);
EXPECT_SCALAR
(
"!"
,
0
,
": - ()"
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
0
,
"Up, up, and away!"
);
EXPECT_SCALAR
(
"!"
,
0
,
"folded to a space,
\n
to a line feed, or
\t
\t
non-content"
);
EXPECT_SCALAR
(
"?"
,
0
,
"-123"
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"http://example.com/foo#bar"
);
DONE
();
EXPECT_SEQ_START
(
"?"
,
0
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"::vector"
);
EXPECT_SCALAR
(
"!"
,
0
,
": - ()"
);
// 7.6
EXPECT_SCALAR
(
"!"
,
0
,
"Up, up, and away!"
);
TEST
DoubleQuotedLines
()
EXPECT_SCALAR
(
"?"
,
0
,
"-123"
);
{
EXPECT_SCALAR
(
"?"
,
0
,
"http://example.com/foo#bar"
);
HANDLE
(
ex7_6
);
EXPECT_SEQ_END
();
EXPECT_DOC_START
();
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"!"
,
0
,
" 1st non-empty
\n
2nd non-empty 3rd non-empty "
);
EXPECT_DOC_END
();
EXPECT_DOC_END
();
DONE
();
DONE
();
}
}
// 7.11
// 7.7
TEST
PlainImplicitKeys
()
{
TEST
SingleQuotedCharacters
()
HANDLE
(
ex7_11
);
{
EXPECT_DOC_START
();
HANDLE
(
ex7_7
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
0
,
"implicit block key"
);
EXPECT_SCALAR
(
"!"
,
0
,
"here's to
\"
quotes
\"
"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_DOC_END
();
EXPECT_MAP_START
(
"?"
,
0
);
DONE
();
EXPECT_SCALAR
(
"?"
,
0
,
"implicit flow key"
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
EXPECT_MAP_END
();
// 7.8
EXPECT_SEQ_END
();
TEST
SingleQuotedImplicitKeys
()
EXPECT_MAP_END
();
{
EXPECT_DOC_END
();
HANDLE
(
ex7_8
);
DONE
();
EXPECT_DOC_START
();
}
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"implicit block key"
);
// 7.12
EXPECT_SEQ_START
(
"?"
,
0
);
TEST
PlainLines
()
{
EXPECT_MAP_START
(
"?"
,
0
);
HANDLE
(
ex7_12
);
EXPECT_SCALAR
(
"!"
,
0
,
"implicit flow key"
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
EXPECT_SCALAR
(
"?"
,
0
,
"1st non-empty
\n
2nd non-empty 3rd non-empty"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
EXPECT_SEQ_END
();
DONE
();
EXPECT_MAP_END
();
}
EXPECT_DOC_END
();
DONE
();
// 7.13
}
TEST
FlowSequence
()
{
HANDLE
(
ex7_13
);
// 7.9
EXPECT_DOC_START
();
TEST
SingleQuotedLines
()
EXPECT_SEQ_START
(
"?"
,
0
);
{
EXPECT_SEQ_START
(
"?"
,
0
);
HANDLE
(
ex7_9
);
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
EXPECT_SCALAR
(
"!"
,
0
,
" 1st non-empty
\n
2nd non-empty 3rd non-empty "
);
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
DONE
();
EXPECT_SCALAR
(
"?"
,
0
,
"three"
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"four"
);
EXPECT_SEQ_END
();
// 7.10
EXPECT_SEQ_END
();
TEST
PlainCharacters
()
EXPECT_DOC_END
();
{
DONE
();
HANDLE
(
ex7_10
);
}
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
// 7.14
EXPECT_SCALAR
(
"?"
,
0
,
"::vector"
);
TEST
FlowSequenceEntries
()
{
EXPECT_SCALAR
(
"!"
,
0
,
": - ()"
);
HANDLE
(
ex7_14
);
EXPECT_SCALAR
(
"?"
,
0
,
"Up, up, and away!"
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
0
,
"-123"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"http://example.com/foo#bar"
);
EXPECT_SCALAR
(
"!"
,
0
,
"double quoted"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"single quoted"
);
EXPECT_SCALAR
(
"?"
,
0
,
"::vector"
);
EXPECT_SCALAR
(
"?"
,
0
,
"plain text"
);
EXPECT_SCALAR
(
"!"
,
0
,
": - ()"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"Up, up, and away!"
);
EXPECT_SCALAR
(
"?"
,
0
,
"nested"
);
EXPECT_SCALAR
(
"?"
,
0
,
"-123"
);
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"http://example.com/foo#bar"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"single"
);
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"pair"
);
EXPECT_DOC_END
();
EXPECT_MAP_END
();
DONE
();
EXPECT_SEQ_END
();
}
EXPECT_DOC_END
();
DONE
();
// 7.11
}
TEST
PlainImplicitKeys
()
{
// 7.15
HANDLE
(
ex7_11
);
TEST
FlowMappings
()
{
EXPECT_DOC_START
();
HANDLE
(
ex7_15
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
0
,
"implicit block key"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_SCALAR
(
"?"
,
0
,
"implicit flow key"
);
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
EXPECT_SCALAR
(
"?"
,
0
,
"three"
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"four"
);
EXPECT_SEQ_END
();
EXPECT_MAP_END
();
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"five"
);
DONE
();
EXPECT_SCALAR
(
"?"
,
0
,
"six"
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"seven"
);
EXPECT_SCALAR
(
"?"
,
0
,
"eight"
);
// 7.12
EXPECT_MAP_END
();
TEST
PlainLines
()
EXPECT_SEQ_END
();
{
EXPECT_DOC_END
();
HANDLE
(
ex7_12
);
DONE
();
EXPECT_DOC_START
();
}
EXPECT_SCALAR
(
"?"
,
0
,
"1st non-empty
\n
2nd non-empty 3rd non-empty"
);
EXPECT_DOC_END
();
// 7.16
DONE
();
TEST
FlowMappingEntries
()
{
}
HANDLE
(
ex7_16
);
EXPECT_DOC_START
();
// 7.13
EXPECT_MAP_START
(
"?"
,
0
);
TEST
FlowSequence
()
EXPECT_SCALAR
(
"?"
,
0
,
"explicit"
);
{
EXPECT_SCALAR
(
"?"
,
0
,
"entry"
);
HANDLE
(
ex7_13
);
EXPECT_SCALAR
(
"?"
,
0
,
"implicit"
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
0
,
"entry"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_NULL
(
0
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_NULL
(
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
EXPECT_DOC_END
();
EXPECT_SEQ_END
();
DONE
();
EXPECT_SEQ_START
(
"?"
,
0
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"three"
);
EXPECT_SCALAR
(
"?"
,
0
,
"four"
);
// 7.17
EXPECT_SEQ_END
();
TEST
FlowMappingSeparateValues
()
{
EXPECT_SEQ_END
();
HANDLE
(
ex7_17
);
EXPECT_DOC_END
();
EXPECT_DOC_START
();
DONE
();
EXPECT_MAP_START
(
"?"
,
0
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"unquoted"
);
EXPECT_SCALAR
(
"!"
,
0
,
"separate"
);
// 7.14
EXPECT_SCALAR
(
"?"
,
0
,
"http://foo.com"
);
TEST
FlowSequenceEntries
()
EXPECT_NULL
(
0
);
{
EXPECT_SCALAR
(
"?"
,
0
,
"omitted value"
);
HANDLE
(
ex7_14
);
EXPECT_NULL
(
0
);
EXPECT_DOC_START
();
EXPECT_NULL
(
0
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"omitted key"
);
EXPECT_SCALAR
(
"!"
,
0
,
"double quoted"
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"!"
,
0
,
"single quoted"
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"plain text"
);
DONE
();
EXPECT_SEQ_START
(
"?"
,
0
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"nested"
);
EXPECT_SEQ_END
();
// 7.18
EXPECT_MAP_START
(
"?"
,
0
);
TEST
FlowMappingAdjacentValues
()
{
EXPECT_SCALAR
(
"?"
,
0
,
"single"
);
HANDLE
(
ex7_18
);
EXPECT_SCALAR
(
"?"
,
0
,
"pair"
);
EXPECT_DOC_START
();
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"!"
,
0
,
"adjacent"
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
DONE
();
EXPECT_SCALAR
(
"!"
,
0
,
"readable"
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
EXPECT_SCALAR
(
"!"
,
0
,
"empty"
);
// 7.15
EXPECT_NULL
(
0
);
TEST
FlowMappings
()
EXPECT_MAP_END
();
{
EXPECT_DOC_END
();
HANDLE
(
ex7_15
);
DONE
();
EXPECT_DOC_START
();
}
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
// 7.19
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
TEST
SinglePairFlowMappings
()
{
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
HANDLE
(
ex7_19
);
EXPECT_SCALAR
(
"?"
,
0
,
"three"
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
0
,
"four"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"foo"
);
EXPECT_SCALAR
(
"?"
,
0
,
"five"
);
EXPECT_SCALAR
(
"?"
,
0
,
"bar"
);
EXPECT_SCALAR
(
"?"
,
0
,
"six"
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"seven"
);
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"eight"
);
EXPECT_DOC_END
();
EXPECT_MAP_END
();
DONE
();
EXPECT_SEQ_END
();
}
EXPECT_DOC_END
();
DONE
();
// 7.20
}
TEST
SinglePairExplicitEntry
()
{
HANDLE
(
ex7_20
);
// 7.16
EXPECT_DOC_START
();
TEST
FlowMappingEntries
()
EXPECT_SEQ_START
(
"?"
,
0
);
{
EXPECT_MAP_START
(
"?"
,
0
);
HANDLE
(
ex7_16
);
EXPECT_SCALAR
(
"?"
,
0
,
"foo bar"
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
0
,
"baz"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"explicit"
);
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"entry"
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"implicit"
);
DONE
();
EXPECT_SCALAR
(
"?"
,
0
,
"entry"
);
}
EXPECT_NULL
(
0
);
EXPECT_NULL
(
0
);
// 7.21
EXPECT_MAP_END
();
TEST
SinglePairImplicitEntries
()
{
EXPECT_DOC_END
();
HANDLE
(
ex7_21
);
DONE
();
EXPECT_DOC_START
();
}
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SEQ_START
(
"?"
,
0
);
// 7.17
EXPECT_MAP_START
(
"?"
,
0
);
TEST
FlowMappingSeparateValues
()
EXPECT_SCALAR
(
"?"
,
0
,
"YAML"
);
{
EXPECT_SCALAR
(
"?"
,
0
,
"separate"
);
HANDLE
(
ex7_17
);
EXPECT_MAP_END
();
EXPECT_DOC_START
();
EXPECT_SEQ_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"unquoted"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"separate"
);
EXPECT_NULL
(
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"http://foo.com"
);
EXPECT_SCALAR
(
"?"
,
0
,
"empty key entry"
);
EXPECT_NULL
(
0
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"omitted value"
);
EXPECT_SEQ_END
();
EXPECT_NULL
(
0
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_NULL
(
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"omitted key"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"JSON"
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"like"
);
DONE
();
EXPECT_MAP_END
();
}
EXPECT_SCALAR
(
"?"
,
0
,
"adjacent"
);
EXPECT_MAP_END
();
// 7.18
EXPECT_SEQ_END
();
TEST
FlowMappingAdjacentValues
()
EXPECT_SEQ_END
();
{
EXPECT_DOC_END
();
HANDLE
(
ex7_18
);
DONE
();
EXPECT_DOC_START
();
}
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"adjacent"
);
// 7.22
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
TEST
InvalidImplicitKeys
()
{
EXPECT_SCALAR
(
"!"
,
0
,
"readable"
);
try
{
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
HANDLE
(
ex7_22
);
EXPECT_SCALAR
(
"!"
,
0
,
"empty"
);
}
EXPECT_NULL
(
0
);
catch
(
const
YAML
::
Exception
&
e
)
{
EXPECT_MAP_END
();
if
(
e
.
msg
==
YAML
::
ErrorMsg
::
END_OF_SEQ_FLOW
)
EXPECT_DOC_END
();
return
true
;
DONE
();
}
throw
;
}
// 7.19
return
" no exception thrown"
;
TEST
SinglePairFlowMappings
()
}
{
HANDLE
(
ex7_19
);
// 7.23
EXPECT_DOC_START
();
TEST
FlowContent
()
{
EXPECT_SEQ_START
(
"?"
,
0
);
HANDLE
(
ex7_23
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
0
,
"foo"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"bar"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"a"
);
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"b"
);
EXPECT_DOC_END
();
EXPECT_SEQ_END
();
DONE
();
EXPECT_MAP_START
(
"?"
,
0
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"a"
);
EXPECT_SCALAR
(
"?"
,
0
,
"b"
);
// 7.20
EXPECT_MAP_END
();
TEST
SinglePairExplicitEntry
()
EXPECT_SCALAR
(
"!"
,
0
,
"a"
);
{
EXPECT_SCALAR
(
"!"
,
0
,
"b"
);
HANDLE
(
ex7_20
);
EXPECT_SCALAR
(
"?"
,
0
,
"c"
);
EXPECT_DOC_START
();
EXPECT_SEQ_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_DOC_END
();
EXPECT_MAP_START
(
"?"
,
0
);
DONE
();
EXPECT_SCALAR
(
"?"
,
0
,
"foo bar"
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"baz"
);
EXPECT_MAP_END
();
// 7.24
EXPECT_SEQ_END
();
TEST
FlowNodes
()
{
EXPECT_DOC_END
();
HANDLE
(
ex7_24
);
DONE
();
EXPECT_DOC_START
();
}
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
"a"
);
// 7.21
EXPECT_SCALAR
(
"!"
,
0
,
"b"
);
TEST
SinglePairImplicitEntries
()
EXPECT_SCALAR
(
"!"
,
1
,
"c"
);
{
EXPECT_ALIAS
(
1
);
HANDLE
(
ex7_21
);
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
""
);
EXPECT_DOC_START
();
EXPECT_SEQ_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_DOC_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
DONE
();
EXPECT_MAP_START
(
"?"
,
0
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"YAML"
);
EXPECT_SCALAR
(
"?"
,
0
,
"separate"
);
// 8.1
EXPECT_MAP_END
();
TEST
BlockScalarHeader
()
{
EXPECT_SEQ_END
();
HANDLE
(
ex8_1
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_NULL
(
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"literal
\n
"
);
EXPECT_SCALAR
(
"?"
,
0
,
"empty key entry"
);
EXPECT_SCALAR
(
"!"
,
0
,
" folded
\n
"
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"!"
,
0
,
"keep
\n\n
"
);
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"!"
,
0
,
" strip"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SEQ_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_DOC_END
();
EXPECT_MAP_START
(
"?"
,
0
);
DONE
();
EXPECT_SCALAR
(
"?"
,
0
,
"JSON"
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"like"
);
EXPECT_MAP_END
();
// 8.2
EXPECT_SCALAR
(
"?"
,
0
,
"adjacent"
);
TEST
BlockIndentationHeader
()
{
EXPECT_MAP_END
();
HANDLE
(
ex8_2
);
EXPECT_SEQ_END
();
EXPECT_DOC_START
();
EXPECT_SEQ_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"!"
,
0
,
"detected
\n
"
);
DONE
();
EXPECT_SCALAR
(
"!"
,
0
,
"
\n\n
# detected
\n
"
);
}
EXPECT_SCALAR
(
"!"
,
0
,
" explicit
\n
"
);
EXPECT_SCALAR
(
"!"
,
0
,
"
\t\n
detected
\n
"
);
// 7.22
EXPECT_SEQ_END
();
TEST
InvalidImplicitKeys
()
EXPECT_DOC_END
();
{
DONE
();
try
{
}
HANDLE
(
ex7_22
);
}
catch
(
const
YAML
::
Exception
&
e
)
{
// 8.3
if
(
e
.
msg
==
YAML
::
ErrorMsg
::
END_OF_SEQ_FLOW
)
TEST
InvalidBlockScalarIndentationIndicators
()
{
return
true
;
{
bool
threw
=
false
;
throw
;
try
{
}
HANDLE
(
ex8_3a
);
return
" no exception thrown"
;
}
}
catch
(
const
YAML
::
Exception
&
e
)
{
if
(
e
.
msg
!=
YAML
::
ErrorMsg
::
END_OF_SEQ
)
// 7.23
throw
;
TEST
FlowContent
()
{
threw
=
true
;
HANDLE
(
ex7_23
);
}
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
if
(
!
threw
)
EXPECT_SEQ_START
(
"?"
,
0
);
return
" no exception thrown for less indented auto-detecting "
EXPECT_SCALAR
(
"?"
,
0
,
"a"
);
"indentation for a literal block scalar"
;
EXPECT_SCALAR
(
"?"
,
0
,
"b"
);
}
EXPECT_SEQ_END
();
EXPECT_MAP_START
(
"?"
,
0
);
{
EXPECT_SCALAR
(
"?"
,
0
,
"a"
);
bool
threw
=
false
;
EXPECT_SCALAR
(
"?"
,
0
,
"b"
);
try
{
EXPECT_MAP_END
();
HANDLE
(
ex8_3b
);
EXPECT_SCALAR
(
"!"
,
0
,
"a"
);
}
EXPECT_SCALAR
(
"!"
,
0
,
"b"
);
catch
(
const
YAML
::
Exception
&
e
)
{
EXPECT_SCALAR
(
"?"
,
0
,
"c"
);
if
(
e
.
msg
!=
YAML
::
ErrorMsg
::
END_OF_SEQ
)
EXPECT_SEQ_END
();
throw
;
EXPECT_DOC_END
();
DONE
();
threw
=
true
;
}
}
// 7.24
if
(
!
threw
)
TEST
FlowNodes
()
return
" no exception thrown for less indented auto-detecting "
{
"indentation for a folded block scalar"
;
HANDLE
(
ex7_24
);
}
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
{
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
"a"
);
bool
threw
=
false
;
EXPECT_SCALAR
(
"!"
,
0
,
"b"
);
try
{
EXPECT_SCALAR
(
"!"
,
1
,
"c"
);
HANDLE
(
ex8_3c
);
EXPECT_ALIAS
(
1
);
}
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
""
);
catch
(
const
YAML
::
Exception
&
e
)
{
EXPECT_SEQ_END
();
if
(
e
.
msg
!=
YAML
::
ErrorMsg
::
END_OF_SEQ
)
EXPECT_DOC_END
();
throw
;
DONE
();
}
threw
=
true
;
}
// 8.1
TEST
BlockScalarHeader
()
if
(
!
threw
)
{
return
" no exception thrown for less indented explicit indentation for "
HANDLE
(
ex8_1
);
"a literal block scalar"
;
EXPECT_DOC_START
();
}
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"literal
\n
"
);
return
true
;
EXPECT_SCALAR
(
"!"
,
0
,
" folded
\n
"
);
}
EXPECT_SCALAR
(
"!"
,
0
,
"keep
\n\n
"
);
EXPECT_SCALAR
(
"!"
,
0
,
" strip"
);
// 8.4
EXPECT_SEQ_END
();
TEST
ChompingFinalLineBreak
()
{
EXPECT_DOC_END
();
HANDLE
(
ex8_4
);
DONE
();
EXPECT_DOC_START
();
}
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"strip"
);
// 8.2
EXPECT_SCALAR
(
"!"
,
0
,
"text"
);
TEST
BlockIndentationHeader
()
EXPECT_SCALAR
(
"?"
,
0
,
"clip"
);
{
EXPECT_SCALAR
(
"!"
,
0
,
"text
\n
"
);
HANDLE
(
ex8_2
);
EXPECT_SCALAR
(
"?"
,
0
,
"keep"
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"text
\n
"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"!"
,
0
,
"detected
\n
"
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"!"
,
0
,
"
\n\n
# detected
\n
"
);
DONE
();
EXPECT_SCALAR
(
"!"
,
0
,
" explicit
\n
"
);
}
EXPECT_SCALAR
(
"!"
,
0
,
"
\t\n
detected
\n
"
);
EXPECT_SEQ_END
();
// 8.5
EXPECT_DOC_END
();
TEST
ChompingTrailingLines
()
{
DONE
();
HANDLE
(
ex8_5
);
}
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
// 8.3
EXPECT_SCALAR
(
"?"
,
0
,
"strip"
);
TEST
InvalidBlockScalarIndentationIndicators
()
EXPECT_SCALAR
(
"!"
,
0
,
"# text"
);
{
EXPECT_SCALAR
(
"?"
,
0
,
"clip"
);
{
EXPECT_SCALAR
(
"!"
,
0
,
"# text
\n
"
);
bool
threw
=
false
;
EXPECT_SCALAR
(
"?"
,
0
,
"keep"
);
try
{
EXPECT_SCALAR
(
"!"
,
0
,
"# text
\n
"
);
// Note: I believe this is a bug in the
HANDLE
(
ex8_3a
);
// YAML spec - it should be "# text\n\n"
}
catch
(
const
YAML
::
Exception
&
e
)
{
EXPECT_MAP_END
();
if
(
e
.
msg
!=
YAML
::
ErrorMsg
::
END_OF_SEQ
)
EXPECT_DOC_END
();
throw
;
DONE
();
}
threw
=
true
;
}
// 8.6
TEST
EmptyScalarChomping
()
{
if
(
!
threw
)
HANDLE
(
ex8_6
);
return
" no exception thrown for less indented auto-detecting indentation for a literal block scalar"
;
EXPECT_DOC_START
();
}
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"strip"
);
{
EXPECT_SCALAR
(
"!"
,
0
,
""
);
bool
threw
=
false
;
EXPECT_SCALAR
(
"?"
,
0
,
"clip"
);
try
{
EXPECT_SCALAR
(
"!"
,
0
,
""
);
HANDLE
(
ex8_3b
);
EXPECT_SCALAR
(
"?"
,
0
,
"keep"
);
}
catch
(
const
YAML
::
Exception
&
e
)
{
EXPECT_SCALAR
(
"!"
,
0
,
"
\n
"
);
if
(
e
.
msg
!=
YAML
::
ErrorMsg
::
END_OF_SEQ
)
EXPECT_MAP_END
();
throw
;
EXPECT_DOC_END
();
DONE
();
threw
=
true
;
}
}
// 8.7
if
(
!
threw
)
TEST
LiteralScalar
()
{
return
" no exception thrown for less indented auto-detecting indentation for a folded block scalar"
;
HANDLE
(
ex8_7
);
}
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"literal
\n\t
text
\n
"
);
{
EXPECT_DOC_END
();
bool
threw
=
false
;
DONE
();
try
{
}
HANDLE
(
ex8_3c
);
}
catch
(
const
YAML
::
Exception
&
e
)
{
// 8.8
if
(
e
.
msg
!=
YAML
::
ErrorMsg
::
END_OF_SEQ
)
TEST
LiteralContent
()
{
throw
;
HANDLE
(
ex8_8
);
EXPECT_DOC_START
();
threw
=
true
;
EXPECT_SCALAR
(
"!"
,
0
,
"
\n\n
literal
\n
\n\n
text
\n
"
);
}
EXPECT_DOC_END
();
DONE
();
if
(
!
threw
)
}
return
" no exception thrown for less indented explicit indentation for a literal block scalar"
;
}
// 8.9
TEST
FoldedScalar
()
{
return
true
;
HANDLE
(
ex8_9
);
}
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"folded text
\n
"
);
// 8.4
EXPECT_DOC_END
();
TEST
ChompingFinalLineBreak
()
DONE
();
{
}
HANDLE
(
ex8_4
);
EXPECT_DOC_START
();
// 8.10
EXPECT_MAP_START
(
"?"
,
0
);
TEST
FoldedLines
()
{
EXPECT_SCALAR
(
"?"
,
0
,
"strip"
);
HANDLE
(
ex8_10
);
EXPECT_SCALAR
(
"!"
,
0
,
"text"
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
0
,
"clip"
);
EXPECT_SCALAR
(
"!"
,
0
,
EXPECT_SCALAR
(
"!"
,
0
,
"text
\n
"
);
"
\n
folded line
\n
next line
\n
* bullet
\n\n
* list
\n
* "
EXPECT_SCALAR
(
"?"
,
0
,
"keep"
);
"lines
\n\n
last line
\n
"
);
EXPECT_SCALAR
(
"!"
,
0
,
"text
\n
"
);
EXPECT_DOC_END
();
EXPECT_MAP_END
();
DONE
();
EXPECT_DOC_END
();
}
DONE
();
}
// 8.11
TEST
MoreIndentedLines
()
{
// 8.5
HANDLE
(
ex8_11
);
TEST
ChompingTrailingLines
()
EXPECT_DOC_START
();
{
EXPECT_SCALAR
(
"!"
,
0
,
HANDLE
(
ex8_5
);
"
\n
folded line
\n
next line
\n
* bullet
\n\n
* list
\n
* "
EXPECT_DOC_START
();
"lines
\n\n
last line
\n
"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"strip"
);
DONE
();
EXPECT_SCALAR
(
"!"
,
0
,
"# text"
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"clip"
);
EXPECT_SCALAR
(
"!"
,
0
,
"# text
\n
"
);
// 8.12
EXPECT_SCALAR
(
"?"
,
0
,
"keep"
);
TEST
EmptySeparationLines
()
{
EXPECT_SCALAR
(
"!"
,
0
,
"# text
\n
"
);
// Note: I believe this is a bug in the YAML spec - it should be "# text\n\n"
HANDLE
(
ex8_12
);
EXPECT_MAP_END
();
EXPECT_DOC_START
();
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"!"
,
0
,
DONE
();
"
\n
folded line
\n
next line
\n
* bullet
\n\n
* list
\n
* "
}
"lines
\n\n
last line
\n
"
);
EXPECT_DOC_END
();
// 8.6
DONE
();
TEST
EmptyScalarChomping
()
}
{
HANDLE
(
ex8_6
);
// 8.13
EXPECT_DOC_START
();
TEST
FinalEmptyLines
()
{
EXPECT_MAP_START
(
"?"
,
0
);
HANDLE
(
ex8_13
);
EXPECT_SCALAR
(
"?"
,
0
,
"strip"
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
""
);
EXPECT_SCALAR
(
"!"
,
0
,
EXPECT_SCALAR
(
"?"
,
0
,
"clip"
);
"
\n
folded line
\n
next line
\n
* bullet
\n\n
* list
\n
* "
EXPECT_SCALAR
(
"!"
,
0
,
""
);
"lines
\n\n
last line
\n
"
);
EXPECT_SCALAR
(
"?"
,
0
,
"keep"
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"!"
,
0
,
"
\n
"
);
DONE
();
EXPECT_MAP_END
();
}
EXPECT_DOC_END
();
DONE
();
// 8.14
}
TEST
BlockSequence
()
{
HANDLE
(
ex8_14
);
// 8.7
EXPECT_DOC_START
();
TEST
LiteralScalar
()
EXPECT_MAP_START
(
"?"
,
0
);
{
EXPECT_SCALAR
(
"?"
,
0
,
"block sequence"
);
HANDLE
(
ex8_7
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_SCALAR
(
"!"
,
0
,
"literal
\n\t
text
\n
"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
DONE
();
EXPECT_SCALAR
(
"?"
,
0
,
"three"
);
}
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
// 8.8
EXPECT_MAP_END
();
TEST
LiteralContent
()
EXPECT_DOC_END
();
{
DONE
();
HANDLE
(
ex8_8
);
}
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"
\n\n
literal
\n
\n\n
text
\n
"
);
// 8.15
EXPECT_DOC_END
();
TEST
BlockSequenceEntryTypes
()
{
DONE
();
HANDLE
(
ex8_15
);
}
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
// 8.9
EXPECT_NULL
(
0
);
TEST
FoldedScalar
()
EXPECT_SCALAR
(
"!"
,
0
,
"block node
\n
"
);
{
EXPECT_SEQ_START
(
"?"
,
0
);
HANDLE
(
ex8_9
);
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
EXPECT_SCALAR
(
"!"
,
0
,
"folded text
\n
"
);
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
EXPECT_MAP_START
(
"?"
,
0
);
DONE
();
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
}
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
EXPECT_MAP_END
();
// 8.10
EXPECT_SEQ_END
();
TEST
FoldedLines
()
EXPECT_DOC_END
();
{
DONE
();
HANDLE
(
ex8_10
);
}
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"
\n
folded line
\n
next line
\n
* bullet
\n\n
* list
\n
* lines
\n\n
last line
\n
"
);
// 8.16
EXPECT_DOC_END
();
TEST
BlockMappings
()
{
DONE
();
HANDLE
(
ex8_16
);
}
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
// 8.11
EXPECT_SCALAR
(
"?"
,
0
,
"block mapping"
);
TEST
MoreIndentedLines
()
EXPECT_MAP_START
(
"?"
,
0
);
{
EXPECT_SCALAR
(
"?"
,
0
,
"key"
);
HANDLE
(
ex8_11
);
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
EXPECT_DOC_START
();
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"!"
,
0
,
"
\n
folded line
\n
next line
\n
* bullet
\n\n
* list
\n
* lines
\n\n
last line
\n
"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
EXPECT_DOC_END
();
DONE
();
DONE
();
}
}
// 8.12
// 8.17
TEST
EmptySeparationLines
()
TEST
ExplicitBlockMappingEntries
()
{
{
HANDLE
(
ex8_17
);
HANDLE
(
ex8_12
);
EXPECT_DOC_START
();
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"
\n
folded line
\n
next line
\n
* bullet
\n\n
* list
\n
* lines
\n\n
last line
\n
"
);
EXPECT_SCALAR
(
"?"
,
0
,
"explicit key"
);
EXPECT_DOC_END
();
EXPECT_NULL
(
0
);
DONE
();
EXPECT_SCALAR
(
"!"
,
0
,
"block key
\n
"
);
}
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
// 8.13
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
TEST
FinalEmptyLines
()
EXPECT_SEQ_END
();
{
EXPECT_MAP_END
();
HANDLE
(
ex8_13
);
EXPECT_DOC_END
();
EXPECT_DOC_START
();
DONE
();
EXPECT_SCALAR
(
"!"
,
0
,
"
\n
folded line
\n
next line
\n
* bullet
\n\n
* list
\n
* lines
\n\n
last line
\n
"
);
}
EXPECT_DOC_END
();
DONE
();
// 8.18
}
TEST
ImplicitBlockMappingEntries
()
{
HANDLE
(
ex8_18
);
// 8.14
EXPECT_DOC_START
();
TEST
BlockSequence
()
EXPECT_MAP_START
(
"?"
,
0
);
{
EXPECT_SCALAR
(
"?"
,
0
,
"plain key"
);
HANDLE
(
ex8_14
);
EXPECT_SCALAR
(
"?"
,
0
,
"in-line value"
);
EXPECT_DOC_START
();
EXPECT_NULL
(
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_NULL
(
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"block sequence"
);
EXPECT_SCALAR
(
"!"
,
0
,
"quoted key"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_SCALAR
(
"?"
,
0
,
"entry"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"three"
);
EXPECT_DOC_END
();
EXPECT_MAP_END
();
DONE
();
EXPECT_SEQ_END
();
}
EXPECT_MAP_END
();
EXPECT_DOC_END
();
// 8.19
DONE
();
TEST
CompactBlockMappings
()
{
}
HANDLE
(
ex8_19
);
EXPECT_DOC_START
();
// 8.15
EXPECT_SEQ_START
(
"?"
,
0
);
TEST
BlockSequenceEntryTypes
()
EXPECT_MAP_START
(
"?"
,
0
);
{
EXPECT_SCALAR
(
"?"
,
0
,
"sun"
);
HANDLE
(
ex8_15
);
EXPECT_SCALAR
(
"?"
,
0
,
"yellow"
);
EXPECT_DOC_START
();
EXPECT_MAP_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_NULL
(
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"block node
\n
"
);
EXPECT_SCALAR
(
"?"
,
0
,
"earth"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"blue"
);
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"moon"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"white"
);
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
EXPECT_MAP_END
();
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
EXPECT_DOC_END
();
DONE
();
DONE
();
}
}
// 8.20
// 8.16
TEST
BlockNodeTypes
()
{
TEST
BlockMappings
()
HANDLE
(
ex8_20
);
{
EXPECT_DOC_START
();
HANDLE
(
ex8_16
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"flow in block"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"Block scalar
\n
"
);
EXPECT_SCALAR
(
"?"
,
0
,
"block mapping"
);
EXPECT_MAP_START
(
"tag:yaml.org,2002:map"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"foo"
);
EXPECT_SCALAR
(
"?"
,
0
,
"key"
);
EXPECT_SCALAR
(
"?"
,
0
,
"bar"
);
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
EXPECT_MAP_END
();
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
EXPECT_DOC_END
();
DONE
();
DONE
();
}
}
// 8.21
// 8.17
TEST
BlockScalarNodes
()
{
TEST
ExplicitBlockMappingEntries
()
HANDLE
(
ex8_21
);
{
EXPECT_DOC_START
();
HANDLE
(
ex8_17
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
0
,
"literal"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"value"
);
// Note: I believe this is a bug in the YAML
EXPECT_SCALAR
(
"?"
,
0
,
"explicit key"
);
// spec - it should be "value\n"
EXPECT_NULL
(
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"folded"
);
EXPECT_SCALAR
(
"!"
,
0
,
"block key
\n
"
);
EXPECT_SCALAR
(
"!foo"
,
0
,
"value"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_DOC_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
DONE
();
EXPECT_SEQ_END
();
}
EXPECT_MAP_END
();
EXPECT_DOC_END
();
// 8.22
DONE
();
TEST
BlockCollectionNodes
()
{
}
HANDLE
(
ex8_22
);
EXPECT_DOC_START
();
// 8.18
EXPECT_MAP_START
(
"?"
,
0
);
TEST
ImplicitBlockMappingEntries
()
EXPECT_SCALAR
(
"?"
,
0
,
"sequence"
);
{
EXPECT_SEQ_START
(
"tag:yaml.org,2002:seq"
,
0
);
HANDLE
(
ex8_18
);
EXPECT_SCALAR
(
"?"
,
0
,
"entry"
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"tag:yaml.org,2002:seq"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"nested"
);
EXPECT_SCALAR
(
"?"
,
0
,
"plain key"
);
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"in-line value"
);
EXPECT_SEQ_END
();
EXPECT_NULL
(
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"mapping"
);
EXPECT_NULL
(
0
);
EXPECT_MAP_START
(
"tag:yaml.org,2002:map"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"quoted key"
);
EXPECT_SCALAR
(
"?"
,
0
,
"foo"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"bar"
);
EXPECT_SCALAR
(
"?"
,
0
,
"entry"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_MAP_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
EXPECT_DOC_END
();
DONE
();
DONE
();
}
}
}
// 8.19
TEST
CompactBlockMappings
()
{
HANDLE
(
ex8_19
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"sun"
);
EXPECT_SCALAR
(
"?"
,
0
,
"yellow"
);
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"earth"
);
EXPECT_SCALAR
(
"?"
,
0
,
"blue"
);
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"moon"
);
EXPECT_SCALAR
(
"?"
,
0
,
"white"
);
EXPECT_MAP_END
();
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.20
TEST
BlockNodeTypes
()
{
HANDLE
(
ex8_20
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"flow in block"
);
EXPECT_SCALAR
(
"!"
,
0
,
"Block scalar
\n
"
);
EXPECT_MAP_START
(
"tag:yaml.org,2002:map"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"foo"
);
EXPECT_SCALAR
(
"?"
,
0
,
"bar"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.21
TEST
BlockScalarNodes
()
{
HANDLE
(
ex8_21
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"literal"
);
EXPECT_SCALAR
(
"!"
,
0
,
"value"
);
// Note: I believe this is a bug in the YAML spec - it should be "value\n"
EXPECT_SCALAR
(
"?"
,
0
,
"folded"
);
EXPECT_SCALAR
(
"!foo"
,
0
,
"value"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.22
TEST
BlockCollectionNodes
()
{
HANDLE
(
ex8_22
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"sequence"
);
EXPECT_SEQ_START
(
"tag:yaml.org,2002:seq"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"entry"
);
EXPECT_SEQ_START
(
"tag:yaml.org,2002:seq"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"nested"
);
EXPECT_SEQ_END
();
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"mapping"
);
EXPECT_MAP_START
(
"tag:yaml.org,2002:map"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"foo"
);
EXPECT_SCALAR
(
"?"
,
0
,
"bar"
);
EXPECT_MAP_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
}
}
}
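For readers unfamiliar with the harness above: the TEST, HANDLE, and EXPECT_* macros are defined elsewhere in the yaml-cpp test suite and are not shown in this diff. As a rough, hypothetical sketch of the record-and-compare pattern they implement (the Event and MockEventHandler types below are invented for illustration and are not yaml-cpp APIs), the TagDirective check could be written out by hand like this:

// Hypothetical illustration only - not part of yaml-cpp. A stand-in handler records
// parser-style events so they can be compared against an expected sequence, which is
// what the EXPECT_* macros in spectests.cpp do incrementally.
#include <iostream>
#include <string>
#include <vector>

struct Event {
  std::string kind;   // "DOC_START", "SCALAR", "DOC_END", ...
  std::string tag;    // "?", "!", or a resolved tag such as "tag:yaml.org,2002:str"
  std::string value;  // scalar content; empty for structural events
  bool operator==(const Event& rhs) const {
    return kind == rhs.kind && tag == rhs.tag && value == rhs.value;
  }
};

struct MockEventHandler {
  std::vector<Event> events;
  void OnDocumentStart() { events.push_back({"DOC_START", "", ""}); }
  void OnScalar(const std::string& tag, const std::string& value) {
    events.push_back({"SCALAR", tag, value});
  }
  void OnDocumentEnd() { events.push_back({"DOC_END", "", ""}); }
};

int main() {
  MockEventHandler handler;
  // In the real test these calls are made by the parser while it reads ex6_16.
  handler.OnDocumentStart();
  handler.OnScalar("tag:yaml.org,2002:str", "foo");
  handler.OnDocumentEnd();

  const std::vector<Event> expected = {
      {"DOC_START", "", ""},
      {"SCALAR", "tag:yaml.org,2002:str", "foo"},
      {"DOC_END", "", ""},
  };
  const bool ok = (handler.events == expected);
  std::cout << (ok ? "TagDirective-style check passed" : "event mismatch") << "\n";
  return ok ? 0 : 1;
}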
test/emittertests.h
View file @
3355bbb3
#ifndef EMITTERTESTS_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define EMITTERTESTS_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) || \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

namespace Test {
bool RunEmitterTests();
}

#endif  // EMITTERTESTS_H_62B23520_7C8E_11DE_8A39_0800200C9A66
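The header only declares the entry point; the actual test driver lives elsewhere in the yaml-cpp test suite. As a small usage sketch (the main() below is an assumption, not the project's real runner), a caller would just turn the boolean result into an exit status:

// Hypothetical driver - the real test runner is elsewhere in the yaml-cpp repository.
#include <cstdlib>
#include <iostream>
#include "emittertests.h"

int main() {
  const bool ok = Test::RunEmitterTests();
  std::cout << "emitter tests " << (ok ? "passed" : "failed") << std::endl;
  return ok ? EXIT_SUCCESS : EXIT_FAILURE;
}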