gaoqiong / yaml-cpp / Commits

Commit 3355bbb3
Authored Mar 22, 2014 by Jesse Beder

    Merge clang-format from core

Parents: 5b889311, 9b4db068
Changes: 72
Showing 20 changed files with 4153 additions and 4224 deletions (+4153, -4224)
src/scanner.h              +125   -115
src/scanscalar.cpp         +211   -207
src/scanscalar.h            +60    -33
src/scantag.cpp             +72    -77
src/scantag.h                +8     -9
src/scantoken.cpp          +422   -429
src/setting.h               +87    -93
src/simplekey.cpp          +121   -132
src/singledocparser.cpp    +390   -381
src/singledocparser.h       +48    -49
src/stream.cpp             +432   -435
src/stream.h                +59    -56
src/streamcharsource.h      +37    -37
src/stringsource.h          +38    -37
src/tag.cpp                 +40    -44
src/tag.h                   +22    -17
src/token.h                 +57    -68
test/core/parsertests.cpp   +69    -70
test/core/spectests.cpp   +1850  -1931
test/emittertests.h          +5     -4
src/scanner.h

#ifndef SCANNER_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define SCANNER_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) || \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include <ios>
#include <string>
#include <queue>
...

@@ -16,118 +17,127 @@
#include "stream.h"
#include "token.h"

namespace YAML {
class Node;
class RegEx;

class Scanner {
 public:
  Scanner(std::istream& in);
  ~Scanner();

  // token queue management (hopefully this looks kinda stl-ish)
  bool empty();
  void pop();
  Token& peek();
  Mark mark() const;

 private:
  struct IndentMarker {
    enum INDENT_TYPE { MAP, SEQ, NONE };
    enum STATUS { VALID, INVALID, UNKNOWN };
    IndentMarker(int column_, INDENT_TYPE type_)
        : column(column_), type(type_), status(VALID), pStartToken(0) {}

    int column;
    INDENT_TYPE type;
    STATUS status;
    Token* pStartToken;
  };

  enum FLOW_MARKER { FLOW_MAP, FLOW_SEQ };

 private:
  // scanning
  void EnsureTokensInQueue();
  void ScanNextToken();
  void ScanToNextToken();
  void StartStream();
  void EndStream();
  Token* PushToken(Token::TYPE type);

  bool InFlowContext() const { return !m_flows.empty(); }
  bool InBlockContext() const { return m_flows.empty(); }
  int GetFlowLevel() const { return m_flows.size(); }

  Token::TYPE GetStartTokenFor(IndentMarker::INDENT_TYPE type) const;
  IndentMarker* PushIndentTo(int column, IndentMarker::INDENT_TYPE type);
  void PopIndentToHere();
  void PopAllIndents();
  void PopIndent();
  int GetTopIndent() const;

  // checking input
  bool CanInsertPotentialSimpleKey() const;
  bool ExistsActiveSimpleKey() const;
  void InsertPotentialSimpleKey();
  void InvalidateSimpleKey();
  bool VerifySimpleKey();
  void PopAllSimpleKeys();

  void ThrowParserException(const std::string& msg) const;

  bool IsWhitespaceToBeEaten(char ch);
  const RegEx& GetValueRegex() const;

  struct SimpleKey {
    SimpleKey(const Mark& mark_, int flowLevel_);

    void Validate();
    void Invalidate();

    Mark mark;
    int flowLevel;
    IndentMarker* pIndent;
    Token *pMapStart, *pKey;
  };

  // and the tokens
  void ScanDirective();
  void ScanDocStart();
  void ScanDocEnd();
  void ScanBlockSeqStart();
  void ScanBlockMapSTart();
  void ScanBlockEnd();
  void ScanBlockEntry();
  void ScanFlowStart();
  void ScanFlowEnd();
  void ScanFlowEntry();
  void ScanKey();
  void ScanValue();
  void ScanAnchorOrAlias();
  void ScanTag();
  void ScanPlainScalar();
  void ScanQuotedScalar();
  void ScanBlockScalar();

 private:
  // the stream
  Stream INPUT;

  // the output (tokens)
  std::queue<Token> m_tokens;

  // state info
  bool m_startedStream, m_endedStream;
  bool m_simpleKeyAllowed;
  bool m_canBeJSONFlow;
  std::stack<SimpleKey> m_simpleKeys;
  std::stack<IndentMarker*> m_indents;
  ptr_vector<IndentMarker> m_indentRefs;  // for "garbage collection"
  std::stack<FLOW_MARKER> m_flows;
};
}

#endif  // SCANNER_H_62B23520_7C8E_11DE_8A39_0800200C9A66
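For orientation, Scanner is yaml-cpp's internal tokenizer: the parser builds one over an std::istream and drains the token queue through empty()/peek()/pop(), with EnsureTokensInQueue filling it lazily. A minimal sketch of that driving loop, assuming the internal headers above are reachable (they are not part of the installed public API) and printing each token's value member:

#include <iostream>
#include <sstream>
#include <string>
#include "scanner.h"  // internal header; assumed visible for this sketch
#include "token.h"

// Sketch only: tokenize a small document and print each token's raw value.
int main() {
  std::istringstream in("key: [1, 2, 3]\n");
  YAML::Scanner scanner(in);
  while (!scanner.empty()) {           // triggers EnsureTokensInQueue internally
    YAML::Token& token = scanner.peek();
    std::cout << token.value << "\n";  // empty for structural tokens like KEY/VALUE
    scanner.pop();
  }
  return 0;
}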
src/scanscalar.cpp

...
@@ -4,211 +4,215 @@
#include "yaml-cpp/exceptions.h"
#include "token.h"

namespace YAML {
// ScanScalar
// . This is where the scalar magic happens.
//
// . We do the scanning in three phases:
//   1. Scan until newline
//   2. Eat newline
//   3. Scan leading blanks.
//
// . Depending on the parameters given, we store or stop
//   and different places in the above flow.
std::string ScanScalar(Stream& INPUT, ScanScalarParams& params) {
  bool foundNonEmptyLine = false;
  bool pastOpeningBreak = (params.fold == FOLD_FLOW);
  bool emptyLine = false, moreIndented = false;
  int foldedNewlineCount = 0;
  bool foldedNewlineStartedMoreIndented = false;
  std::size_t lastEscapedChar = std::string::npos;
  std::string scalar;
  params.leadingSpaces = false;

  while (INPUT) {
    // ********************************
    // Phase #1: scan until line ending

    std::size_t lastNonWhitespaceChar = scalar.size();
    bool escapedNewline = false;
    while (!params.end.Matches(INPUT) && !Exp::Break().Matches(INPUT)) {
      if (!INPUT)
        break;

      // document indicator?
      if (INPUT.column() == 0 && Exp::DocIndicator().Matches(INPUT)) {
        if (params.onDocIndicator == BREAK)
          break;
        else if (params.onDocIndicator == THROW)
          throw ParserException(INPUT.mark(), ErrorMsg::DOC_IN_SCALAR);
      }

      foundNonEmptyLine = true;
      pastOpeningBreak = true;

      // escaped newline? (only if we're escaping on slash)
      if (params.escape == '\\' && Exp::EscBreak().Matches(INPUT)) {
        // eat escape character and get out (but preserve trailing whitespace!)
        INPUT.get();
        lastNonWhitespaceChar = scalar.size();
        lastEscapedChar = scalar.size();
        escapedNewline = true;
        break;
      }

      // escape this?
      if (INPUT.peek() == params.escape) {
        scalar += Exp::Escape(INPUT);
        lastNonWhitespaceChar = scalar.size();
        lastEscapedChar = scalar.size();
        continue;
      }

      // otherwise, just add the damn character
      char ch = INPUT.get();
      scalar += ch;
      if (ch != ' ' && ch != '\t')
        lastNonWhitespaceChar = scalar.size();
    }

    // eof? if we're looking to eat something, then we throw
    if (!INPUT) {
      if (params.eatEnd)
        throw ParserException(INPUT.mark(), ErrorMsg::EOF_IN_SCALAR);
      break;
    }

    // doc indicator?
    if (params.onDocIndicator == BREAK && INPUT.column() == 0 &&
        Exp::DocIndicator().Matches(INPUT))
      break;

    // are we done via character match?
    int n = params.end.Match(INPUT);
    if (n >= 0) {
      if (params.eatEnd)
        INPUT.eat(n);
      break;
    }

    // do we remove trailing whitespace?
    if (params.fold == FOLD_FLOW)
      scalar.erase(lastNonWhitespaceChar);

    // ********************************
    // Phase #2: eat line ending
    n = Exp::Break().Match(INPUT);
    INPUT.eat(n);

    // ********************************
    // Phase #3: scan initial spaces

    // first the required indentation
    while (INPUT.peek() == ' ' &&
           (INPUT.column() < params.indent ||
            (params.detectIndent && !foundNonEmptyLine)))
      INPUT.eat(1);

    // update indent if we're auto-detecting
    if (params.detectIndent && !foundNonEmptyLine)
      params.indent = std::max(params.indent, INPUT.column());

    // and then the rest of the whitespace
    while (Exp::Blank().Matches(INPUT)) {
      // we check for tabs that masquerade as indentation
      if (INPUT.peek() == '\t' && INPUT.column() < params.indent &&
          params.onTabInIndentation == THROW)
        throw ParserException(INPUT.mark(), ErrorMsg::TAB_IN_INDENTATION);

      if (!params.eatLeadingWhitespace)
        break;

      INPUT.eat(1);
    }

    // was this an empty line?
    bool nextEmptyLine = Exp::Break().Matches(INPUT);
    bool nextMoreIndented = Exp::Blank().Matches(INPUT);
    if (params.fold == FOLD_BLOCK && foldedNewlineCount == 0 && nextEmptyLine)
      foldedNewlineStartedMoreIndented = moreIndented;

    // for block scalars, we always start with a newline, so we should ignore it
    // (not fold or keep)
    if (pastOpeningBreak) {
      switch (params.fold) {
        case DONT_FOLD:
          scalar += "\n";
          break;
        case FOLD_BLOCK:
          if (!emptyLine && !nextEmptyLine && !moreIndented &&
              !nextMoreIndented && INPUT.column() >= params.indent)
            scalar += " ";
          else if (nextEmptyLine)
            foldedNewlineCount++;
          else
            scalar += "\n";

          if (!nextEmptyLine && foldedNewlineCount > 0) {
            scalar += std::string(foldedNewlineCount - 1, '\n');
            if (foldedNewlineStartedMoreIndented ||
                nextMoreIndented | !foundNonEmptyLine)
              scalar += "\n";
            foldedNewlineCount = 0;
          }
          break;
        case FOLD_FLOW:
          if (nextEmptyLine)
            scalar += "\n";
          else if (!emptyLine && !nextEmptyLine && !escapedNewline)
            scalar += " ";
          break;
      }
    }

    emptyLine = nextEmptyLine;
    moreIndented = nextMoreIndented;
    pastOpeningBreak = true;

    // are we done via indentation?
    if (!emptyLine && INPUT.column() < params.indent) {
      params.leadingSpaces = true;
      break;
    }
  }

  // post-processing
  if (params.trimTrailingSpaces) {
    std::size_t pos = scalar.find_last_not_of(' ');
    if (lastEscapedChar != std::string::npos) {
      if (pos < lastEscapedChar || pos == std::string::npos)
        pos = lastEscapedChar;
    }
    if (pos < scalar.size())
      scalar.erase(pos + 1);
  }

  switch (params.chomp) {
    case CLIP: {
      std::size_t pos = scalar.find_last_not_of('\n');
      if (lastEscapedChar != std::string::npos) {
        if (pos < lastEscapedChar || pos == std::string::npos)
          pos = lastEscapedChar;
      }
      if (pos == std::string::npos)
        scalar.erase();
      else if (pos + 1 < scalar.size())
        scalar.erase(pos + 2);
    } break;
    case STRIP: {
      std::size_t pos = scalar.find_last_not_of('\n');
      if (lastEscapedChar != std::string::npos) {
        if (pos < lastEscapedChar || pos == std::string::npos)
          pos = lastEscapedChar;
      }
      if (pos == std::string::npos)
        scalar.erase();
      else if (pos < scalar.size())
        scalar.erase(pos + 1);
    } break;
    default:
      break;
  }

  return scalar;
}
}
src/scanscalar.h

#ifndef SCANSCALAR_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define SCANSCALAR_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) || \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include <string>
#include "regex.h"
#include "stream.h"

namespace YAML {
enum CHOMP { STRIP = -1, CLIP, KEEP };
enum ACTION { NONE, BREAK, THROW };
enum FOLD { DONT_FOLD, FOLD_BLOCK, FOLD_FLOW };

struct ScanScalarParams {
  ScanScalarParams()
      : eatEnd(false),
        indent(0),
        detectIndent(false),
        eatLeadingWhitespace(0),
        escape(0),
        fold(DONT_FOLD),
        trimTrailingSpaces(0),
        chomp(CLIP),
        onDocIndicator(NONE),
        onTabInIndentation(NONE),
        leadingSpaces(false) {}

  // input:
  RegEx end;                  // what condition ends this scalar?
  bool eatEnd;                // should we eat that condition when we see it?
  int indent;                 // what level of indentation should be eaten and ignored?
  bool detectIndent;          // should we try to autodetect the indent?
  bool eatLeadingWhitespace;  // should we continue eating this delicious
                              // indentation after 'indent' spaces?
  char escape;                // what character do we escape on (i.e., slash or single quote)
                              // (0 for none)
  FOLD fold;                  // how do we fold line ends?
  bool trimTrailingSpaces;    // do we remove all trailing spaces (at the very end)
  CHOMP chomp;                // do we strip, clip, or keep trailing newlines (at the very end)
                              // Note: strip means kill all, clip means keep at most one,
                              // keep means keep all
  ACTION onDocIndicator;      // what do we do if we see a document indicator?
  ACTION onTabInIndentation;  // what do we do if we see a tab where we should
                              // be seeing indentation spaces

  // output:
  bool leadingSpaces;
};

std::string ScanScalar(Stream& INPUT, ScanScalarParams& info);
}

#endif  // SCANSCALAR_H_62B23520_7C8E_11DE_8A39_0800200C9A66
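The chomp comment above ("strip means kill all, clip means keep at most one, keep means keep all") is the part that trips people up most often, so here is a small self-contained illustration of that rule applied to trailing newlines. This is written for this note only: the Chomp helper and the int mode are hypothetical stand-ins for the CHOMP enum, not the library's own post-processing (which lives at the end of ScanScalar above).

#include <algorithm>
#include <cassert>
#include <string>

// Hypothetical helper, illustration only: mode -1 = STRIP, 0 = CLIP, +1 = KEEP.
static std::string Chomp(std::string s, int mode) {
  std::size_t last = s.find_last_not_of('\n');
  std::size_t end = (last == std::string::npos ? 0 : last + 1);   // one past the text
  if (mode == -1) return s.substr(0, end);                        // strip: kill all
  if (mode == 0) return s.substr(0, std::min(s.size(), end + 1)); // clip: keep at most one
  return s;                                                       // keep: keep all
}

int main() {
  const std::string raw = "text\n\n\n";
  assert(Chomp(raw, -1) == "text");
  assert(Chomp(raw, 0) == "text\n");
  assert(Chomp(raw, +1) == "text\n\n\n");
  return 0;
}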
src/scantag.cpp

...
@@ -3,82 +3,77 @@
#include "exp.h"
#include "yaml-cpp/exceptions.h"

namespace YAML {
const std::string ScanVerbatimTag(Stream& INPUT) {
  std::string tag;

  // eat the start character
  INPUT.get();

  while (INPUT) {
    if (INPUT.peek() == Keys::VerbatimTagEnd) {
      // eat the end character
      INPUT.get();
      return tag;
    }

    int n = Exp::URI().Match(INPUT);
    if (n <= 0)
      break;

    tag += INPUT.get(n);
  }

  throw ParserException(INPUT.mark(), ErrorMsg::END_OF_VERBATIM_TAG);
}

const std::string ScanTagHandle(Stream& INPUT, bool& canBeHandle) {
  std::string tag;
  canBeHandle = true;
  Mark firstNonWordChar;

  while (INPUT) {
    if (INPUT.peek() == Keys::Tag) {
      if (!canBeHandle)
        throw ParserException(firstNonWordChar, ErrorMsg::CHAR_IN_TAG_HANDLE);
      break;
    }

    int n = 0;
    if (canBeHandle) {
      n = Exp::Word().Match(INPUT);
      if (n <= 0) {
        canBeHandle = false;
        firstNonWordChar = INPUT.mark();
      }
    }

    if (!canBeHandle)
      n = Exp::Tag().Match(INPUT);

    if (n <= 0)
      break;

    tag += INPUT.get(n);
  }

  return tag;
}

const std::string ScanTagSuffix(Stream& INPUT) {
  std::string tag;

  while (INPUT) {
    int n = Exp::Tag().Match(INPUT);
    if (n <= 0)
      break;

    tag += INPUT.get(n);
  }

  if (tag.empty())
    throw ParserException(INPUT.mark(), ErrorMsg::TAG_WITH_NO_SUFFIX);

  return tag;
}
}
src/scantag.h

#ifndef SCANTAG_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define SCANTAG_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) || \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include <string>
#include "stream.h"

namespace YAML {
const std::string ScanVerbatimTag(Stream& INPUT);
const std::string ScanTagHandle(Stream& INPUT, bool& canBeHandle);
const std::string ScanTagSuffix(Stream& INPUT);
}

#endif  // SCANTAG_H_62B23520_7C8E_11DE_8A39_0800200C9A66
src/scantoken.cpp

...
@@ -7,433 +7,426 @@
#include "tag.h"
#include <sstream>

namespace YAML {
///////////////////////////////////////////////////////////////////////
// Specialization for scanning specific tokens

// Directive
// . Note: no semantic checking is done here (that's for the parser to do)
void Scanner::ScanDirective() {
  std::string name;
  std::vector<std::string> params;

  // pop indents and simple keys
  PopAllIndents();
  PopAllSimpleKeys();

  m_simpleKeyAllowed = false;
  m_canBeJSONFlow = false;

  // store pos and eat indicator
  Token token(Token::DIRECTIVE, INPUT.mark());
  INPUT.eat(1);

  // read name
  while (INPUT && !Exp::BlankOrBreak().Matches(INPUT))
    token.value += INPUT.get();

  // read parameters
  while (1) {
    // first get rid of whitespace
    while (Exp::Blank().Matches(INPUT))
      INPUT.eat(1);

    // break on newline or comment
    if (!INPUT || Exp::Break().Matches(INPUT) || Exp::Comment().Matches(INPUT))
      break;

    // now read parameter
    std::string param;
    while (INPUT && !Exp::BlankOrBreak().Matches(INPUT))
      param += INPUT.get();

    token.params.push_back(param);
  }

  m_tokens.push(token);
}

// DocStart
void Scanner::ScanDocStart() {
  PopAllIndents();
  PopAllSimpleKeys();
  m_simpleKeyAllowed = false;
  m_canBeJSONFlow = false;

  // eat
  Mark mark = INPUT.mark();
  INPUT.eat(3);
  m_tokens.push(Token(Token::DOC_START, mark));
}

// DocEnd
void Scanner::ScanDocEnd() {
  PopAllIndents();
  PopAllSimpleKeys();
  m_simpleKeyAllowed = false;
  m_canBeJSONFlow = false;

  // eat
  Mark mark = INPUT.mark();
  INPUT.eat(3);
  m_tokens.push(Token(Token::DOC_END, mark));
}

// FlowStart
void Scanner::ScanFlowStart() {
  // flows can be simple keys
  InsertPotentialSimpleKey();
  m_simpleKeyAllowed = true;
  m_canBeJSONFlow = false;

  // eat
  Mark mark = INPUT.mark();
  char ch = INPUT.get();
  FLOW_MARKER flowType = (ch == Keys::FlowSeqStart ? FLOW_SEQ : FLOW_MAP);
  m_flows.push(flowType);
  Token::TYPE type =
      (flowType == FLOW_SEQ ? Token::FLOW_SEQ_START : Token::FLOW_MAP_START);
  m_tokens.push(Token(type, mark));
}

// FlowEnd
void Scanner::ScanFlowEnd() {
  if (InBlockContext())
    throw ParserException(INPUT.mark(), ErrorMsg::FLOW_END);

  // we might have a solo entry in the flow context
  if (InFlowContext()) {
    if (m_flows.top() == FLOW_MAP && VerifySimpleKey())
      m_tokens.push(Token(Token::VALUE, INPUT.mark()));
    else if (m_flows.top() == FLOW_SEQ)
      InvalidateSimpleKey();
  }

  m_simpleKeyAllowed = false;
  m_canBeJSONFlow = true;

  // eat
  Mark mark = INPUT.mark();
  char ch = INPUT.get();

  // check that it matches the start
  FLOW_MARKER flowType = (ch == Keys::FlowSeqEnd ? FLOW_SEQ : FLOW_MAP);
  if (m_flows.top() != flowType)
    throw ParserException(mark, ErrorMsg::FLOW_END);
  m_flows.pop();

  Token::TYPE type = (flowType ? Token::FLOW_SEQ_END : Token::FLOW_MAP_END);
  m_tokens.push(Token(type, mark));
}

// FlowEntry
void Scanner::ScanFlowEntry() {
  // we might have a solo entry in the flow context
  if (InFlowContext()) {
    if (m_flows.top() == FLOW_MAP && VerifySimpleKey())
      m_tokens.push(Token(Token::VALUE, INPUT.mark()));
    else if (m_flows.top() == FLOW_SEQ)
      InvalidateSimpleKey();
  }

  m_simpleKeyAllowed = true;
  m_canBeJSONFlow = false;

  // eat
  Mark mark = INPUT.mark();
  INPUT.eat(1);
  m_tokens.push(Token(Token::FLOW_ENTRY, mark));
}

// BlockEntry
void Scanner::ScanBlockEntry() {
  // we better be in the block context!
  if (InFlowContext())
    throw ParserException(INPUT.mark(), ErrorMsg::BLOCK_ENTRY);

  // can we put it here?
  if (!m_simpleKeyAllowed)
    throw ParserException(INPUT.mark(), ErrorMsg::BLOCK_ENTRY);

  PushIndentTo(INPUT.column(), IndentMarker::SEQ);
  m_simpleKeyAllowed = true;
  m_canBeJSONFlow = false;

  // eat
  Mark mark = INPUT.mark();
  INPUT.eat(1);
  m_tokens.push(Token(Token::BLOCK_ENTRY, mark));
}

// Key
void Scanner::ScanKey() {
  // handle keys diffently in the block context (and manage indents)
  if (InBlockContext()) {
    if (!m_simpleKeyAllowed)
      throw ParserException(INPUT.mark(), ErrorMsg::MAP_KEY);

    PushIndentTo(INPUT.column(), IndentMarker::MAP);
  }

  // can only put a simple key here if we're in block context
  m_simpleKeyAllowed = InBlockContext();

  // eat
  Mark mark = INPUT.mark();
  INPUT.eat(1);
  m_tokens.push(Token(Token::KEY, mark));
}

// Value
void Scanner::ScanValue() {
  // and check that simple key
  bool isSimpleKey = VerifySimpleKey();
  m_canBeJSONFlow = false;

  if (isSimpleKey) {
    // can't follow a simple key with another simple key (dunno why, though - it
    // seems fine)
    m_simpleKeyAllowed = false;
  } else {
    // handle values diffently in the block context (and manage indents)
    if (InBlockContext()) {
      if (!m_simpleKeyAllowed)
        throw ParserException(INPUT.mark(), ErrorMsg::MAP_VALUE);

      PushIndentTo(INPUT.column(), IndentMarker::MAP);
    }

    // can only put a simple key here if we're in block context
    m_simpleKeyAllowed = InBlockContext();
  }

  // eat
  Mark mark = INPUT.mark();
  INPUT.eat(1);
  m_tokens.push(Token(Token::VALUE, mark));
}

// AnchorOrAlias
void Scanner::ScanAnchorOrAlias() {
  bool alias;
  std::string name;

  // insert a potential simple key
  InsertPotentialSimpleKey();
  m_simpleKeyAllowed = false;
  m_canBeJSONFlow = false;

  // eat the indicator
  Mark mark = INPUT.mark();
  char indicator = INPUT.get();
  alias = (indicator == Keys::Alias);

  // now eat the content
  while (INPUT && Exp::Anchor().Matches(INPUT))
    name += INPUT.get();

  // we need to have read SOMETHING!
  if (name.empty())
    throw ParserException(INPUT.mark(), alias ? ErrorMsg::ALIAS_NOT_FOUND
                                              : ErrorMsg::ANCHOR_NOT_FOUND);

  // and needs to end correctly
  if (INPUT && !Exp::AnchorEnd().Matches(INPUT))
    throw ParserException(INPUT.mark(), alias ? ErrorMsg::CHAR_IN_ALIAS
                                              : ErrorMsg::CHAR_IN_ANCHOR);

  // and we're done
  Token token(alias ? Token::ALIAS : Token::ANCHOR, mark);
  token.value = name;
  m_tokens.push(token);
}

// Tag
void Scanner::ScanTag() {
  // insert a potential simple key
  InsertPotentialSimpleKey();
  m_simpleKeyAllowed = false;
  m_canBeJSONFlow = false;

  Token token(Token::TAG, INPUT.mark());

  // eat the indicator
  INPUT.get();

  if (INPUT && INPUT.peek() == Keys::VerbatimTagStart) {
    std::string tag = ScanVerbatimTag(INPUT);

    token.value = tag;
    token.data = Tag::VERBATIM;
  } else {
    bool canBeHandle;
    token.value = ScanTagHandle(INPUT, canBeHandle);
    if (!canBeHandle && token.value.empty())
      token.data = Tag::NON_SPECIFIC;
    else if (token.value.empty())
      token.data = Tag::SECONDARY_HANDLE;
    else
      token.data = Tag::PRIMARY_HANDLE;

    // is there a suffix?
    if (canBeHandle && INPUT.peek() == Keys::Tag) {
      // eat the indicator
      INPUT.get();
      token.params.push_back(ScanTagSuffix(INPUT));
      token.data = Tag::NAMED_HANDLE;
    }
  }

  m_tokens.push(token);
}

// PlainScalar
void Scanner::ScanPlainScalar() {
  std::string scalar;

  // set up the scanning parameters
  ScanScalarParams params;
  params.end = (InFlowContext() ? Exp::EndScalarInFlow() : Exp::EndScalar()) ||
               (Exp::BlankOrBreak() + Exp::Comment());
  params.eatEnd = false;
  params.indent = (InFlowContext() ? 0 : GetTopIndent() + 1);
  params.fold = FOLD_FLOW;
  params.eatLeadingWhitespace = true;
  params.trimTrailingSpaces = true;
  params.chomp = STRIP;
  params.onDocIndicator = BREAK;
  params.onTabInIndentation = THROW;

  // insert a potential simple key
  InsertPotentialSimpleKey();

  Mark mark = INPUT.mark();
  scalar = ScanScalar(INPUT, params);

  // can have a simple key only if we ended the scalar by starting a new line
  m_simpleKeyAllowed = params.leadingSpaces;
  m_canBeJSONFlow = false;

  // finally, check and see if we ended on an illegal character
  // if(Exp::IllegalCharInScalar.Matches(INPUT))
  //   throw ParserException(INPUT.mark(), ErrorMsg::CHAR_IN_SCALAR);

  Token token(Token::PLAIN_SCALAR, mark);
  token.value = scalar;
  m_tokens.push(token);
}

// QuotedScalar
void Scanner::ScanQuotedScalar() {
  std::string scalar;

  // peek at single or double quote (don't eat because we need to preserve (for
  // the time being) the input position)
  char quote = INPUT.peek();
  bool single = (quote == '\'');

  // setup the scanning parameters
  ScanScalarParams params;
  params.end =
      (single ? RegEx(quote) && !Exp::EscSingleQuote() : RegEx(quote));
  params.eatEnd = true;
  params.escape = (single ? '\'' : '\\');
  params.indent = 0;
  params.fold = FOLD_FLOW;
  params.eatLeadingWhitespace = true;
  params.trimTrailingSpaces = false;
  params.chomp = CLIP;
  params.onDocIndicator = THROW;

  // insert a potential simple key
  InsertPotentialSimpleKey();

  Mark mark = INPUT.mark();

  // now eat that opening quote
  INPUT.get();

  // and scan
  scalar = ScanScalar(INPUT, params);
  m_simpleKeyAllowed = false;
  m_canBeJSONFlow = true;

  Token token(Token::NON_PLAIN_SCALAR, mark);
  token.value = scalar;
  m_tokens.push(token);
}

// BlockScalarToken
// . These need a little extra processing beforehand.
// . We need to scan the line where the indicator is (this doesn't count as part
//   of the scalar),
//   and then we need to figure out what level of indentation we'll be using.
void Scanner::ScanBlockScalar() {
  std::string scalar;

  ScanScalarParams params;
  params.indent = 1;
  params.detectIndent = true;

  // eat block indicator ('|' or '>')
  Mark mark = INPUT.mark();
  char indicator = INPUT.get();
  params.fold = (indicator == Keys::FoldedScalar ? FOLD_BLOCK : DONT_FOLD);

  // eat chomping/indentation indicators
  params.chomp = CLIP;
  int n = Exp::Chomp().Match(INPUT);
  for (int i = 0; i < n; i++) {
    char ch = INPUT.get();
    if (ch == '+')
      params.chomp = KEEP;
    else if (ch == '-')
      params.chomp = STRIP;
    else if (Exp::Digit().Matches(ch)) {
      if (ch == '0')
        throw ParserException(INPUT.mark(), ErrorMsg::ZERO_INDENT_IN_BLOCK);

      params.indent = ch - '0';
      params.detectIndent = false;
    }
  }

  // now eat whitespace
  while (Exp::Blank().Matches(INPUT))
    INPUT.eat(1);

  // and comments to the end of the line
  if (Exp::Comment().Matches(INPUT))
    while (INPUT && !Exp::Break().Matches(INPUT))
      INPUT.eat(1);

  // if it's not a line break, then we ran into a bad character inline
  if (INPUT && !Exp::Break().Matches(INPUT))
    throw ParserException(INPUT.mark(), ErrorMsg::CHAR_IN_BLOCK);

  // set the initial indentation
  if (GetTopIndent() >= 0)
    params.indent += GetTopIndent();

  params.eatLeadingWhitespace = false;
  params.trimTrailingSpaces = false;
  params.onTabInIndentation = THROW;

  scalar = ScanScalar(INPUT, params);

  // simple keys always ok after block scalars (since we're gonna start a new
  // line anyways)
  m_simpleKeyAllowed = true;
  m_canBeJSONFlow = false;

  Token token(Token::NON_PLAIN_SCALAR, mark);
  token.value = scalar;
  m_tokens.push(token);
}
}
src/setting.h

#ifndef SETTING_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define SETTING_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) || \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include <memory>
#include <vector>
#include "yaml-cpp/noncopyable.h"

namespace YAML {
class SettingChangeBase;

template <typename T>
class Setting {
 public:
  Setting() : m_value() {}

  const T get() const { return m_value; }
  std::auto_ptr<SettingChangeBase> set(const T& value);
  void restore(const Setting<T>& oldSetting) { m_value = oldSetting.get(); }

 private:
  T m_value;
};

class SettingChangeBase {
 public:
  virtual ~SettingChangeBase() {}
  virtual void pop() = 0;
};

template <typename T>
class SettingChange : public SettingChangeBase {
 public:
  SettingChange(Setting<T>* pSetting) : m_pCurSetting(pSetting) {
    // copy old setting to save its state
    m_oldSetting = *pSetting;
  }

  virtual void pop() { m_pCurSetting->restore(m_oldSetting); }

 private:
  Setting<T>* m_pCurSetting;
  Setting<T> m_oldSetting;
};

template <typename T>
inline std::auto_ptr<SettingChangeBase> Setting<T>::set(const T& value) {
  std::auto_ptr<SettingChangeBase> pChange(new SettingChange<T>(this));
  m_value = value;
  return pChange;
}

class SettingChanges : private noncopyable {
 public:
  SettingChanges() {}
  ~SettingChanges() { clear(); }

  void clear() {
    restore();
    for (setting_changes::const_iterator it = m_settingChanges.begin();
         it != m_settingChanges.end(); ++it)
      delete *it;
    m_settingChanges.clear();
  }

  void restore() {
    for (setting_changes::const_iterator it = m_settingChanges.begin();
         it != m_settingChanges.end(); ++it)
      (*it)->pop();
  }

  void push(std::auto_ptr<SettingChangeBase> pSettingChange) {
    m_settingChanges.push_back(pSettingChange.release());
  }

  // like std::auto_ptr - assignment is transfer of ownership
  SettingChanges& operator=(SettingChanges& rhs) {
    if (this == &rhs)
      return *this;

    clear();
    m_settingChanges = rhs.m_settingChanges;
    rhs.m_settingChanges.clear();
    return *this;
  }

 private:
  typedef std::vector<SettingChangeBase*> setting_changes;
  setting_changes m_settingChanges;
};
}

#endif  // SETTING_H_62B23520_7C8E_11DE_8A39_0800200C9A66
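Setting<T> and SettingChanges above implement a small undo stack: set() installs a new value and returns a SettingChange that remembers the old one, and SettingChanges restores every recorded change when it is cleared or destroyed. A hedged sketch of the intended pattern, using a made-up boolean setting rather than the emitter state yaml-cpp actually manages this way:

#include "setting.h"  // internal header; assumed visible for this sketch

// Sketch only: temporarily override a setting and roll it back afterwards.
void Example() {
  YAML::Setting<bool> flowStyle;      // hypothetical setting, value-initialized to false
  YAML::SettingChanges changes;

  changes.push(flowStyle.set(true));  // install the new value, record the old one
  // ... code that relies on flowStyle.get() == true ...

  changes.restore();                  // pops every recorded change: back to false
}                                     // ~SettingChanges() also restores/cleans up at scope exit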
src/simplekey.cpp
View file @
3355bbb3
...
...
@@ -3,137 +3,126 @@
#include "yaml-cpp/exceptions.h"
#include "exp.h"
namespace YAML {
Scanner::SimpleKey::SimpleKey(const Mark& mark_, int flowLevel_)
    : mark(mark_), flowLevel(flowLevel_), pIndent(0), pMapStart(0), pKey(0) {}

void Scanner::SimpleKey::Validate() {
  // Note: pIndent will *not* be garbage here;
  // we "garbage collect" them so we can
  // always refer to them
  if (pIndent)
    pIndent->status = IndentMarker::VALID;
  if (pMapStart)
    pMapStart->status = Token::VALID;
  if (pKey)
    pKey->status = Token::VALID;
}

void Scanner::SimpleKey::Invalidate() {
  if (pIndent)
    pIndent->status = IndentMarker::INVALID;
  if (pMapStart)
    pMapStart->status = Token::INVALID;
  if (pKey)
    pKey->status = Token::INVALID;
}

// CanInsertPotentialSimpleKey
bool Scanner::CanInsertPotentialSimpleKey() const {
  if (!m_simpleKeyAllowed)
    return false;

  return !ExistsActiveSimpleKey();
}

// ExistsActiveSimpleKey
// . Returns true if there's a potential simple key at our flow level
//   (there's allowed at most one per flow level, i.e., at the start of the
//   flow start token)
bool Scanner::ExistsActiveSimpleKey() const {
  if (m_simpleKeys.empty())
    return false;

  const SimpleKey& key = m_simpleKeys.top();
  return key.flowLevel == GetFlowLevel();
}

// InsertPotentialSimpleKey
// . If we can, add a potential simple key to the queue,
//   and save it on a stack.
void Scanner::InsertPotentialSimpleKey() {
  if (!CanInsertPotentialSimpleKey())
    return;

  SimpleKey key(INPUT.mark(), GetFlowLevel());

  // first add a map start, if necessary
  if (InBlockContext()) {
    key.pIndent = PushIndentTo(INPUT.column(), IndentMarker::MAP);
    if (key.pIndent) {
      key.pIndent->status = IndentMarker::UNKNOWN;
      key.pMapStart = key.pIndent->pStartToken;
      key.pMapStart->status = Token::UNVERIFIED;
    }
  }

  // then add the (now unverified) key
  m_tokens.push(Token(Token::KEY, INPUT.mark()));
  key.pKey = &m_tokens.back();
  key.pKey->status = Token::UNVERIFIED;

  m_simpleKeys.push(key);
}

// InvalidateSimpleKey
// . Automatically invalidate the simple key in our flow level
void Scanner::InvalidateSimpleKey() {
  if (m_simpleKeys.empty())
    return;

  // grab top key
  SimpleKey& key = m_simpleKeys.top();
  if (key.flowLevel != GetFlowLevel())
    return;

  key.Invalidate();
  m_simpleKeys.pop();
}

// VerifySimpleKey
// . Determines whether the latest simple key to be added is valid,
//   and if so, makes it valid.
bool Scanner::VerifySimpleKey() {
  if (m_simpleKeys.empty())
    return false;

  // grab top key
  SimpleKey key = m_simpleKeys.top();

  // only validate if we're in the correct flow level
  if (key.flowLevel != GetFlowLevel())
    return false;

  m_simpleKeys.pop();

  bool isValid = true;

  // needs to be less than 1024 characters and inline
  if (INPUT.line() != key.mark.line || INPUT.pos() - key.mark.pos > 1024)
    isValid = false;

  // validate or invalidate the key accordingly
  if (isValid)
    key.Validate();
  else
    key.Invalidate();

  return isValid;
}

void Scanner::PopAllSimpleKeys() {
  while (!m_simpleKeys.empty())
    m_simpleKeys.pop();
}
}
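The same-line / 1024-character restriction enforced in VerifySimpleKey comes from the YAML spec's limit on implicit keys. As a minimal standalone sketch of that rule only (illustrative, not part of this commit; it assumes Mark carries pos and line as declared in yaml-cpp/mark.h):

#include "yaml-cpp/mark.h"

// A key that started at 'keyStart' can still be a simple key once the
// scanner has reached 'current' only if it stays on the same line and
// spans at most 1024 characters.
inline bool StillSimpleKey(const YAML::Mark& keyStart,
                           const YAML::Mark& current) {
  return current.line == keyStart.line && current.pos - keyStart.pos <= 1024;
}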
src/singledocparser.cpp
View file @
3355bbb3
...
...
@@ -10,385 +10,394 @@
#include <cstdio>
#include <algorithm>
namespace YAML {
SingleDocParser::SingleDocParser(Scanner& scanner,
                                 const Directives& directives)
    : m_scanner(scanner),
      m_directives(directives),
      m_pCollectionStack(new CollectionStack),
      m_curAnchor(0) {}

SingleDocParser::~SingleDocParser() {}

// HandleDocument
// . Handles the next document
// . Throws a ParserException on error.
void SingleDocParser::HandleDocument(EventHandler& eventHandler) {
  assert(!m_scanner.empty());  // guaranteed that there are tokens
  assert(!m_curAnchor);

  eventHandler.OnDocumentStart(m_scanner.peek().mark);

  // eat doc start
  if (m_scanner.peek().type == Token::DOC_START)
    m_scanner.pop();

  // recurse!
  HandleNode(eventHandler);

  eventHandler.OnDocumentEnd();

  // and finally eat any doc ends we see
  while (!m_scanner.empty() && m_scanner.peek().type == Token::DOC_END)
    m_scanner.pop();
}

void SingleDocParser::HandleNode(EventHandler& eventHandler) {
  // an empty node *is* a possibility
  if (m_scanner.empty()) {
    eventHandler.OnNull(m_scanner.mark(), NullAnchor);
    return;
  }

  // save location
  Mark mark = m_scanner.peek().mark;

  // special case: a value node by itself must be a map, with no header
  if (m_scanner.peek().type == Token::VALUE) {
    eventHandler.OnMapStart(mark, "?", NullAnchor);
    HandleMap(eventHandler);
    eventHandler.OnMapEnd();
    return;
  }

  // special case: an alias node
  if (m_scanner.peek().type == Token::ALIAS) {
    eventHandler.OnAlias(mark, LookupAnchor(mark, m_scanner.peek().value));
    m_scanner.pop();
    return;
  }

  std::string tag;
  anchor_t anchor;
  ParseProperties(tag, anchor);

  const Token& token = m_scanner.peek();

  if (token.type == Token::PLAIN_SCALAR && token.value == "null") {
    eventHandler.OnNull(mark, anchor);
    m_scanner.pop();
    return;
  }

  // add non-specific tags
  if (tag.empty())
    tag = (token.type == Token::NON_PLAIN_SCALAR ? "!" : "?");

  // now split based on what kind of node we should be
  switch (token.type) {
    case Token::PLAIN_SCALAR:
    case Token::NON_PLAIN_SCALAR:
      eventHandler.OnScalar(mark, tag, anchor, token.value);
      m_scanner.pop();
      return;
    case Token::FLOW_SEQ_START:
    case Token::BLOCK_SEQ_START:
      eventHandler.OnSequenceStart(mark, tag, anchor);
      HandleSequence(eventHandler);
      eventHandler.OnSequenceEnd();
      return;
    case Token::FLOW_MAP_START:
    case Token::BLOCK_MAP_START:
      eventHandler.OnMapStart(mark, tag, anchor);
      HandleMap(eventHandler);
      eventHandler.OnMapEnd();
      return;
    case Token::KEY:
      // compact maps can only go in a flow sequence
      if (m_pCollectionStack->GetCurCollectionType() ==
          CollectionType::FlowSeq) {
        eventHandler.OnMapStart(mark, tag, anchor);
        HandleMap(eventHandler);
        eventHandler.OnMapEnd();
        return;
      }
      break;
    default:
      break;
  }

  if (tag == "?")
    eventHandler.OnNull(mark, anchor);
  else
    eventHandler.OnScalar(mark, tag, anchor, "");
}

void SingleDocParser::HandleSequence(EventHandler& eventHandler) {
  // split based on start token
  switch (m_scanner.peek().type) {
    case Token::BLOCK_SEQ_START:
      HandleBlockSequence(eventHandler);
      break;
    case Token::FLOW_SEQ_START:
      HandleFlowSequence(eventHandler);
      break;
    default:
      break;
  }
}

void SingleDocParser::HandleBlockSequence(EventHandler& eventHandler) {
  // eat start token
  m_scanner.pop();
  m_pCollectionStack->PushCollectionType(CollectionType::BlockSeq);

  while (1) {
    if (m_scanner.empty())
      throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_SEQ);

    Token token = m_scanner.peek();
    if (token.type != Token::BLOCK_ENTRY && token.type != Token::BLOCK_SEQ_END)
      throw ParserException(token.mark, ErrorMsg::END_OF_SEQ);

    m_scanner.pop();
    if (token.type == Token::BLOCK_SEQ_END)
      break;

    // check for null
    if (!m_scanner.empty()) {
      const Token& token = m_scanner.peek();
      if (token.type == Token::BLOCK_ENTRY ||
          token.type == Token::BLOCK_SEQ_END) {
        eventHandler.OnNull(token.mark, NullAnchor);
        continue;
      }
    }

    HandleNode(eventHandler);
  }

  m_pCollectionStack->PopCollectionType(CollectionType::BlockSeq);
}

void SingleDocParser::HandleFlowSequence(EventHandler& eventHandler) {
  // eat start token
  m_scanner.pop();
  m_pCollectionStack->PushCollectionType(CollectionType::FlowSeq);

  while (1) {
    if (m_scanner.empty())
      throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_SEQ_FLOW);

    // first check for end
    if (m_scanner.peek().type == Token::FLOW_SEQ_END) {
      m_scanner.pop();
      break;
    }

    // then read the node
    HandleNode(eventHandler);

    if (m_scanner.empty())
      throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_SEQ_FLOW);

    // now eat the separator (or could be a sequence end, which we ignore -
    // but if it's neither, then it's a bad node)
    Token& token = m_scanner.peek();
    if (token.type == Token::FLOW_ENTRY)
      m_scanner.pop();
    else if (token.type != Token::FLOW_SEQ_END)
      throw ParserException(token.mark, ErrorMsg::END_OF_SEQ_FLOW);
  }

  m_pCollectionStack->PopCollectionType(CollectionType::FlowSeq);
}

void SingleDocParser::HandleMap(EventHandler& eventHandler) {
  // split based on start token
  switch (m_scanner.peek().type) {
    case Token::BLOCK_MAP_START:
      HandleBlockMap(eventHandler);
      break;
    case Token::FLOW_MAP_START:
      HandleFlowMap(eventHandler);
      break;
    case Token::KEY:
      HandleCompactMap(eventHandler);
      break;
    case Token::VALUE:
      HandleCompactMapWithNoKey(eventHandler);
      break;
    default:
      break;
  }
}

void SingleDocParser::HandleBlockMap(EventHandler& eventHandler) {
  // eat start token
  m_scanner.pop();
  m_pCollectionStack->PushCollectionType(CollectionType::BlockMap);

  while (1) {
    if (m_scanner.empty())
      throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_MAP);

    Token token = m_scanner.peek();
    if (token.type != Token::KEY && token.type != Token::VALUE &&
        token.type != Token::BLOCK_MAP_END)
      throw ParserException(token.mark, ErrorMsg::END_OF_MAP);

    if (token.type == Token::BLOCK_MAP_END) {
      m_scanner.pop();
      break;
    }

    // grab key (if non-null)
    if (token.type == Token::KEY) {
      m_scanner.pop();
      HandleNode(eventHandler);
    } else {
      eventHandler.OnNull(token.mark, NullAnchor);
    }

    // now grab value (optional)
    if (!m_scanner.empty() && m_scanner.peek().type == Token::VALUE) {
      m_scanner.pop();
      HandleNode(eventHandler);
    } else {
      eventHandler.OnNull(token.mark, NullAnchor);
    }
  }

  m_pCollectionStack->PopCollectionType(CollectionType::BlockMap);
}

void SingleDocParser::HandleFlowMap(EventHandler& eventHandler) {
  // eat start token
  m_scanner.pop();
  m_pCollectionStack->PushCollectionType(CollectionType::FlowMap);

  while (1) {
    if (m_scanner.empty())
      throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_MAP_FLOW);

    Token& token = m_scanner.peek();
    const Mark mark = token.mark;

    // first check for end
    if (token.type == Token::FLOW_MAP_END) {
      m_scanner.pop();
      break;
    }

    // grab key (if non-null)
    if (token.type == Token::KEY) {
      m_scanner.pop();
      HandleNode(eventHandler);
    } else {
      eventHandler.OnNull(mark, NullAnchor);
    }

    // now grab value (optional)
    if (!m_scanner.empty() && m_scanner.peek().type == Token::VALUE) {
      m_scanner.pop();
      HandleNode(eventHandler);
    } else {
      eventHandler.OnNull(mark, NullAnchor);
    }

    if (m_scanner.empty())
      throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_MAP_FLOW);

    // now eat the separator (or could be a map end, which we ignore - but if
    // it's neither, then it's a bad node)
    Token& nextToken = m_scanner.peek();
    if (nextToken.type == Token::FLOW_ENTRY)
      m_scanner.pop();
    else if (nextToken.type != Token::FLOW_MAP_END)
      throw ParserException(nextToken.mark, ErrorMsg::END_OF_MAP_FLOW);
  }

  m_pCollectionStack->PopCollectionType(CollectionType::FlowMap);
}

// . Single "key: value" pair in a flow sequence
void SingleDocParser::HandleCompactMap(EventHandler& eventHandler) {
  m_pCollectionStack->PushCollectionType(CollectionType::CompactMap);

  // grab key
  Mark mark = m_scanner.peek().mark;
  m_scanner.pop();
  HandleNode(eventHandler);

  // now grab value (optional)
  if (!m_scanner.empty() && m_scanner.peek().type == Token::VALUE) {
    m_scanner.pop();
    HandleNode(eventHandler);
  } else {
    eventHandler.OnNull(mark, NullAnchor);
  }

  m_pCollectionStack->PopCollectionType(CollectionType::CompactMap);
}

// . Single ": value" pair in a flow sequence
void SingleDocParser::HandleCompactMapWithNoKey(EventHandler& eventHandler) {
  m_pCollectionStack->PushCollectionType(CollectionType::CompactMap);

  // null key
  eventHandler.OnNull(m_scanner.peek().mark, NullAnchor);

  // grab value
  m_scanner.pop();
  HandleNode(eventHandler);

  m_pCollectionStack->PopCollectionType(CollectionType::CompactMap);
}

// ParseProperties
// . Grabs any tag or anchor tokens and deals with them.
void SingleDocParser::ParseProperties(std::string& tag, anchor_t& anchor) {
  tag.clear();
  anchor = NullAnchor;

  while (1) {
    if (m_scanner.empty())
      return;

    switch (m_scanner.peek().type) {
      case Token::TAG:
        ParseTag(tag);
        break;
      case Token::ANCHOR:
        ParseAnchor(anchor);
        break;
      default:
        return;
    }
  }
}

void SingleDocParser::ParseTag(std::string& tag) {
  Token& token = m_scanner.peek();
  if (!tag.empty())
    throw ParserException(token.mark, ErrorMsg::MULTIPLE_TAGS);

  Tag tagInfo(token);
  tag = tagInfo.Translate(m_directives);
  m_scanner.pop();
}

void SingleDocParser::ParseAnchor(anchor_t& anchor) {
  Token& token = m_scanner.peek();
  if (anchor)
    throw ParserException(token.mark, ErrorMsg::MULTIPLE_ANCHORS);

  anchor = RegisterAnchor(token.value);
  m_scanner.pop();
}

anchor_t SingleDocParser::RegisterAnchor(const std::string& name) {
  if (name.empty())
    return NullAnchor;

  return m_anchors[name] = ++m_curAnchor;
}

anchor_t SingleDocParser::LookupAnchor(const Mark& mark,
                                       const std::string& name) const {
  Anchors::const_iterator it = m_anchors.find(name);
  if (it == m_anchors.end())
    throw ParserException(mark, ErrorMsg::UNKNOWN_ANCHOR);

  return it->second;
}
}
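SingleDocParser never builds a node tree itself; it only fires callbacks on the EventHandler passed into HandleDocument. As a rough illustration of how those callbacks line up (a sketch, not part of this commit; the signatures are inferred from the calls above and the header path is assumed), a handler that just logs scalars might look like this:

#include <iostream>
#include <string>
#include "yaml-cpp/anchor.h"
#include "yaml-cpp/eventhandler.h"  // assumed location of the EventHandler interface
#include "yaml-cpp/mark.h"

// Prints every scalar the parser reports; all other events are ignored.
class LoggingHandler : public YAML::EventHandler {
 public:
  virtual void OnDocumentStart(const YAML::Mark&) {}
  virtual void OnDocumentEnd() {}
  virtual void OnNull(const YAML::Mark&, YAML::anchor_t) {}
  virtual void OnAlias(const YAML::Mark&, YAML::anchor_t) {}
  virtual void OnScalar(const YAML::Mark&, const std::string& tag,
                        YAML::anchor_t, const std::string& value) {
    std::cout << tag << " : " << value << "\n";
  }
  virtual void OnSequenceStart(const YAML::Mark&, const std::string&,
                               YAML::anchor_t) {}
  virtual void OnSequenceEnd() {}
  virtual void OnMapStart(const YAML::Mark&, const std::string&,
                          YAML::anchor_t) {}
  virtual void OnMapEnd() {}
};

In practice such a handler would be driven through the public Parser class rather than SingleDocParser, which is internal.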
src/singledocparser.h
View file @
3355bbb3
#ifndef SINGLEDOCPARSER_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define SINGLEDOCPARSER_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#if defined(_MSC_VER) || (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || (__GNUC__ >= 4)) // GCC supports "pragma once" correctly since 3.4
#if defined(_MSC_VER) || \
(defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
(__GNUC__ >= 4)) // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif
#include "yaml-cpp/anchor.h"
#include "yaml-cpp/noncopyable.h"
#include <string>
#include <map>
#include <memory>
namespace YAML {
struct Directives;
struct Mark;
struct Token;
class CollectionStack;
class EventHandler;
class Node;
class Scanner;

class SingleDocParser : private noncopyable {
 public:
  SingleDocParser(Scanner& scanner, const Directives& directives);
  ~SingleDocParser();

  void HandleDocument(EventHandler& eventHandler);

 private:
  void HandleNode(EventHandler& eventHandler);

  void HandleSequence(EventHandler& eventHandler);
  void HandleBlockSequence(EventHandler& eventHandler);
  void HandleFlowSequence(EventHandler& eventHandler);

  void HandleMap(EventHandler& eventHandler);
  void HandleBlockMap(EventHandler& eventHandler);
  void HandleFlowMap(EventHandler& eventHandler);
  void HandleCompactMap(EventHandler& eventHandler);
  void HandleCompactMapWithNoKey(EventHandler& eventHandler);

  void ParseProperties(std::string& tag, anchor_t& anchor);
  void ParseTag(std::string& tag);
  void ParseAnchor(anchor_t& anchor);

  anchor_t RegisterAnchor(const std::string& name);
  anchor_t LookupAnchor(const Mark& mark, const std::string& name) const;

 private:
  Scanner& m_scanner;
  const Directives& m_directives;

  std::auto_ptr<CollectionStack> m_pCollectionStack;

  typedef std::map<std::string, anchor_t> Anchors;
  Anchors m_anchors;

  anchor_t m_curAnchor;
};
}

#endif  // SINGLEDOCPARSER_H_62B23520_7C8E_11DE_8A39_0800200C9A66
src/stream.cpp
View file @
3355bbb3
...
...
@@ -6,442 +6,439 @@
#define YAML_PREFETCH_SIZE 2048
#endif
#define S_ARRAY_SIZE(A) (sizeof(A) / sizeof(*(A)))
#define S_ARRAY_END(A) ((A) + S_ARRAY_SIZE(A))
#define CP_REPLACEMENT_CHARACTER (0xFFFD)
namespace YAML {
enum UtfIntroState {
  uis_start,
  uis_utfbe_b1,
  uis_utf32be_b2,
  uis_utf32be_bom3,
  uis_utf32be,
  uis_utf16be,
  uis_utf16be_bom1,
  uis_utfle_bom1,
  uis_utf16le_bom2,
  uis_utf32le_bom3,
  uis_utf16le,
  uis_utf32le,
  uis_utf8_imp,
  uis_utf16le_imp,
  uis_utf32le_imp3,
  uis_utf8_bom1,
  uis_utf8_bom2,
  uis_utf8,
  uis_error
};

enum UtfIntroCharType {
  uict00,
  uictBB,
  uictBF,
  uictEF,
  uictFE,
  uictFF,
  uictAscii,
  uictOther,
  uictMax
};

static bool s_introFinalState[] = {
    false,  // uis_start
    false,  // uis_utfbe_b1
    false,  // uis_utf32be_b2
    false,  // uis_utf32be_bom3
    true,   // uis_utf32be
    true,   // uis_utf16be
    false,  // uis_utf16be_bom1
    false,  // uis_utfle_bom1
    false,  // uis_utf16le_bom2
    false,  // uis_utf32le_bom3
    true,   // uis_utf16le
    true,   // uis_utf32le
    false,  // uis_utf8_imp
    false,  // uis_utf16le_imp
    false,  // uis_utf32le_imp3
    false,  // uis_utf8_bom1
    false,  // uis_utf8_bom2
    true,   // uis_utf8
    true,   // uis_error
};

static UtfIntroState s_introTransitions[][uictMax] = {
    // uict00, uictBB, uictBF, uictEF, uictFE, uictFF, uictAscii, uictOther
    {uis_utfbe_b1, uis_utf8, uis_utf8, uis_utf8_bom1, uis_utf16be_bom1, uis_utfle_bom1, uis_utf8_imp, uis_utf8},
    {uis_utf32be_b2, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf16be, uis_utf8},
    {uis_utf32be, uis_utf8, uis_utf8, uis_utf8, uis_utf32be_bom3, uis_utf8, uis_utf8, uis_utf8},
    {uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf32be, uis_utf8, uis_utf8},
    {uis_utf32be, uis_utf32be, uis_utf32be, uis_utf32be, uis_utf32be, uis_utf32be, uis_utf32be, uis_utf32be},
    {uis_utf16be, uis_utf16be, uis_utf16be, uis_utf16be, uis_utf16be, uis_utf16be, uis_utf16be, uis_utf16be},
    {uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf16be, uis_utf8, uis_utf8},
    {uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf16le_bom2, uis_utf8, uis_utf8, uis_utf8},
    {uis_utf32le_bom3, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le},
    {uis_utf32le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le},
    {uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le},
    {uis_utf32le, uis_utf32le, uis_utf32le, uis_utf32le, uis_utf32le, uis_utf32le, uis_utf32le, uis_utf32le},
    {uis_utf16le_imp, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8},
    {uis_utf32le_imp3, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le},
    {uis_utf32le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le, uis_utf16le},
    {uis_utf8, uis_utf8_bom2, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8},
    {uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8},
    {uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8, uis_utf8},
};

static char s_introUngetCount[][uictMax] = {
    // uict00, uictBB, uictBF, uictEF, uictFE, uictFF, uictAscii, uictOther
    {0, 1, 1, 0, 0, 0, 0, 1},
    {0, 2, 2, 2, 2, 2, 2, 2},
    {3, 3, 3, 3, 0, 3, 3, 3},
    {4, 4, 4, 4, 4, 0, 4, 4},
    {1, 1, 1, 1, 1, 1, 1, 1},
    {1, 1, 1, 1, 1, 1, 1, 1},
    {2, 2, 2, 2, 2, 0, 2, 2},
    {2, 2, 2, 2, 0, 2, 2, 2},
    {0, 1, 1, 1, 1, 1, 1, 1},
    {0, 2, 2, 2, 2, 2, 2, 2},
    {1, 1, 1, 1, 1, 1, 1, 1},
    {1, 1, 1, 1, 1, 1, 1, 1},
    {0, 2, 2, 2, 2, 2, 2, 2},
    {0, 3, 3, 3, 3, 3, 3, 3},
    {4, 4, 4, 4, 4, 4, 4, 4},
    {2, 0, 2, 2, 2, 2, 2, 2},
    {3, 3, 0, 3, 3, 3, 3, 3},
    {1, 1, 1, 1, 1, 1, 1, 1},
};

inline UtfIntroCharType IntroCharTypeOf(std::istream::int_type ch) {
  if (std::istream::traits_type::eof() == ch) {
    return uictOther;
  }

  switch (ch) {
    case 0:
      return uict00;
    case 0xBB:
      return uictBB;
    case 0xBF:
      return uictBF;
    case 0xEF:
      return uictEF;
    case 0xFE:
      return uictFE;
    case 0xFF:
      return uictFF;
  }

  if ((ch > 0) && (ch < 0xFF)) {
    return uictAscii;
  }

  return uictOther;
}

inline char Utf8Adjust(unsigned long ch, unsigned char lead_bits,
                       unsigned char rshift) {
  const unsigned char header = ((1 << lead_bits) - 1) << (8 - lead_bits);
  const unsigned char mask = (0xFF >> (lead_bits + 1));
  return static_cast<char>(
      static_cast<unsigned char>(header | ((ch >> rshift) & mask)));
}

inline void QueueUnicodeCodepoint(std::deque<char>& q, unsigned long ch) {
  // We are not allowed to queue the Stream::eof() codepoint, so
  // replace it with CP_REPLACEMENT_CHARACTER
  if (static_cast<unsigned long>(Stream::eof()) == ch) {
    ch = CP_REPLACEMENT_CHARACTER;
  }

  if (ch < 0x80) {
    q.push_back(Utf8Adjust(ch, 0, 0));
  } else if (ch < 0x800) {
    q.push_back(Utf8Adjust(ch, 2, 6));
    q.push_back(Utf8Adjust(ch, 1, 0));
  } else if (ch < 0x10000) {
    q.push_back(Utf8Adjust(ch, 3, 12));
    q.push_back(Utf8Adjust(ch, 1, 6));
    q.push_back(Utf8Adjust(ch, 1, 0));
  } else {
    q.push_back(Utf8Adjust(ch, 4, 18));
    q.push_back(Utf8Adjust(ch, 1, 12));
    q.push_back(Utf8Adjust(ch, 1, 6));
    q.push_back(Utf8Adjust(ch, 1, 0));
  }
}

Stream::Stream(std::istream& input)
    : m_input(input),
      m_pPrefetched(new unsigned char[YAML_PREFETCH_SIZE]),
      m_nPrefetchedAvailable(0),
      m_nPrefetchedUsed(0) {
  typedef std::istream::traits_type char_traits;

  if (!input)
    return;

  // Determine (or guess) the character-set by reading the BOM, if any. See
  // the YAML specification for the determination algorithm.
  char_traits::int_type intro[4];
  int nIntroUsed = 0;
  UtfIntroState state = uis_start;
  for (; !s_introFinalState[state];) {
    std::istream::int_type ch = input.get();
    intro[nIntroUsed++] = ch;

    UtfIntroCharType charType = IntroCharTypeOf(ch);
    UtfIntroState newState = s_introTransitions[state][charType];
    int nUngets = s_introUngetCount[state][charType];
    if (nUngets > 0) {
      input.clear();
      for (; nUngets > 0; --nUngets) {
        if (char_traits::eof() != intro[--nIntroUsed])
          input.putback(char_traits::to_char_type(intro[nIntroUsed]));
      }
    }
    state = newState;
  }

  switch (state) {
    case uis_utf8:
      m_charSet = utf8;
      break;
    case uis_utf16le:
      m_charSet = utf16le;
      break;
    case uis_utf16be:
      m_charSet = utf16be;
      break;
    case uis_utf32le:
      m_charSet = utf32le;
      break;
    case uis_utf32be:
      m_charSet = utf32be;
      break;
    default:
      m_charSet = utf8;
      break;
  }

  ReadAheadTo(0);
}

Stream::~Stream() { delete[] m_pPrefetched; }

char Stream::peek() const {
  if (m_readahead.empty()) {
    return Stream::eof();
  }

  return m_readahead[0];
}

Stream::operator bool() const {
  return m_input.good() ||
         (!m_readahead.empty() && m_readahead[0] != Stream::eof());
}

// get
// . Extracts a character from the stream and updates our position
char Stream::get() {
  char ch = peek();
  AdvanceCurrent();
  m_mark.column++;

  if (ch == '\n') {
    m_mark.column = 0;
    m_mark.line++;
  }

  return ch;
}

// get
// . Extracts 'n' characters from the stream and updates our position
std::string Stream::get(int n) {
  std::string ret;
  ret.reserve(n);
  for (int i = 0; i < n; i++)
    ret += get();
  return ret;
}

// eat
// . Eats 'n' characters and updates our position.
void Stream::eat(int n) {
  for (int i = 0; i < n; i++)
    get();
}

void Stream::AdvanceCurrent() {
  if (!m_readahead.empty()) {
    m_readahead.pop_front();
    m_mark.pos++;
  }

  ReadAheadTo(0);
}

bool Stream::_ReadAheadTo(size_t i) const {
  while (m_input.good() && (m_readahead.size() <= i)) {
    switch (m_charSet) {
      case utf8:
        StreamInUtf8();
        break;
      case utf16le:
        StreamInUtf16();
        break;
      case utf16be:
        StreamInUtf16();
        break;
      case utf32le:
        StreamInUtf32();
        break;
      case utf32be:
        StreamInUtf32();
        break;
    }
  }

  // signal end of stream
  if (!m_input.good())
    m_readahead.push_back(Stream::eof());

  return m_readahead.size() > i;
}

void Stream::StreamInUtf8() const {
  unsigned char b = GetNextByte();
  if (m_input.good()) {
    m_readahead.push_back(b);
  }
}

void Stream::StreamInUtf16() const {
  unsigned long ch = 0;
  unsigned char bytes[2];
  int nBigEnd = (m_charSet == utf16be) ? 0 : 1;

  bytes[0] = GetNextByte();
  bytes[1] = GetNextByte();
  if (!m_input.good()) {
    return;
  }
  ch = (static_cast<unsigned long>(bytes[nBigEnd]) << 8) |
       static_cast<unsigned long>(bytes[1 ^ nBigEnd]);

  if (ch >= 0xDC00 && ch < 0xE000) {
    // Trailing (low) surrogate...ugh, wrong order
    QueueUnicodeCodepoint(m_readahead, CP_REPLACEMENT_CHARACTER);
    return;
  } else if (ch >= 0xD800 && ch < 0xDC00) {
    // ch is a leading (high) surrogate

    // Four byte UTF-8 code point

    // Read the trailing (low) surrogate
    for (;;) {
      bytes[0] = GetNextByte();
      bytes[1] = GetNextByte();
      if (!m_input.good()) {
        QueueUnicodeCodepoint(m_readahead, CP_REPLACEMENT_CHARACTER);
        return;
      }
      unsigned long chLow = (static_cast<unsigned long>(bytes[nBigEnd]) << 8) |
                            static_cast<unsigned long>(bytes[1 ^ nBigEnd]);
      if (chLow < 0xDC00 || ch >= 0xE000) {
        // Trouble...not a low surrogate. Dump a REPLACEMENT CHARACTER into
        // the stream.
        QueueUnicodeCodepoint(m_readahead, CP_REPLACEMENT_CHARACTER);

        // Deal with the next UTF-16 unit
        if (chLow < 0xD800 || ch >= 0xE000) {
          // Easiest case: queue the codepoint and return
          QueueUnicodeCodepoint(m_readahead, ch);
          return;
        } else {
          // Start the loop over with the new high surrogate
          ch = chLow;
          continue;
        }
      }

      // Select the payload bits from the high surrogate
      ch &= 0x3FF;
      ch <<= 10;

      // Include bits from low surrogate
      ch |= (chLow & 0x3FF);

      // Add the surrogacy offset
      ch += 0x10000;
    }
  }

  QueueUnicodeCodepoint(m_readahead, ch);
}

inline char* ReadBuffer(unsigned char* pBuffer) {
  return reinterpret_cast<char*>(pBuffer);
}

unsigned char Stream::GetNextByte() const {
  if (m_nPrefetchedUsed >= m_nPrefetchedAvailable) {
    std::streambuf* pBuf = m_input.rdbuf();
    m_nPrefetchedAvailable = static_cast<std::size_t>(
        pBuf->sgetn(ReadBuffer(m_pPrefetched), YAML_PREFETCH_SIZE));
    m_nPrefetchedUsed = 0;
    if (!m_nPrefetchedAvailable) {
      m_input.setstate(std::ios_base::eofbit);
    }

    if (0 == m_nPrefetchedAvailable) {
      return 0;
    }
  }

  return m_pPrefetched[m_nPrefetchedUsed++];
}

void Stream::StreamInUtf32() const {
  static int indexes[2][4] = {{3, 2, 1, 0}, {0, 1, 2, 3}};

  unsigned long ch = 0;
  unsigned char bytes[4];
  int* pIndexes = (m_charSet == utf32be) ? indexes[1] : indexes[0];

  bytes[0] = GetNextByte();
  bytes[1] = GetNextByte();
  bytes[2] = GetNextByte();
  bytes[3] = GetNextByte();
  if (!m_input.good()) {
    return;
  }

  for (int i = 0; i < 4; ++i) {
    ch <<= 8;
    ch |= bytes[pIndexes[i]];
  }

  QueueUnicodeCodepoint(m_readahead, ch);
}
}
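The surrogate handling in StreamInUtf16 packs a high/low UTF-16 pair into a single code point before it is re-encoded as UTF-8 by QueueUnicodeCodepoint. The arithmetic in isolation, as a small self-contained sketch (illustrative only, not part of this commit):

#include <cassert>

// Combine a UTF-16 surrogate pair into a Unicode code point, mirroring the
// steps in Stream::StreamInUtf16: keep the 10 payload bits of each half,
// shift the high half up, and add the 0x10000 offset.
unsigned long CombineSurrogates(unsigned long high, unsigned long low) {
  assert(high >= 0xD800 && high < 0xDC00);  // leading (high) surrogate
  assert(low >= 0xDC00 && low < 0xE000);    // trailing (low) surrogate
  unsigned long ch = high & 0x3FF;
  ch <<= 10;
  ch |= (low & 0x3FF);
  return ch + 0x10000;
}
// Example: CombineSurrogates(0xD83D, 0xDE00) == 0x1F600.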
src/stream.h
View file @
3355bbb3
#ifndef STREAM_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define STREAM_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#if defined(_MSC_VER) || (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || (__GNUC__ >= 4)) // GCC supports "pragma once" correctly since 3.4
#if defined(_MSC_VER) || \
(defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
(__GNUC__ >= 4)) // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif
#include "yaml-cpp/noncopyable.h"
#include "yaml-cpp/mark.h"
#include <cstddef>
...
...
@@ -15,65 +16,67 @@
#include <set>
#include <string>
namespace YAML {
class Stream : private noncopyable {
 public:
  friend class StreamCharSource;

  Stream(std::istream& input);
  ~Stream();

  operator bool() const;
  bool operator!() const { return !static_cast<bool>(*this); }

  char peek() const;
  char get();
  std::string get(int n);
  void eat(int n = 1);

  static char eof() { return 0x04; }

  const Mark mark() const { return m_mark; }
  int pos() const { return m_mark.pos; }
  int line() const { return m_mark.line; }
  int column() const { return m_mark.column; }
  void ResetColumn() { m_mark.column = 0; }

 private:
  enum CharacterSet { utf8, utf16le, utf16be, utf32le, utf32be };

  std::istream& m_input;
  Mark m_mark;

  CharacterSet m_charSet;
  mutable std::deque<char> m_readahead;
  unsigned char* const m_pPrefetched;
  mutable size_t m_nPrefetchedAvailable;
  mutable size_t m_nPrefetchedUsed;

  void AdvanceCurrent();
  char CharAt(size_t i) const;
  bool ReadAheadTo(size_t i) const;
  bool _ReadAheadTo(size_t i) const;
  void StreamInUtf8() const;
  void StreamInUtf16() const;
  void StreamInUtf32() const;
  unsigned char GetNextByte() const;
};

// CharAt
// . Unchecked access
inline char Stream::CharAt(size_t i) const { return m_readahead[i]; }

inline bool Stream::ReadAheadTo(size_t i) const {
  if (m_readahead.size() > i)
    return true;
  return _ReadAheadTo(i);
}
}

#endif  // STREAM_H_62B23520_7C8E_11DE_8A39_0800200C9A66
src/streamcharsource.h
View file @
3355bbb3
#ifndef STREAMCHARSOURCE_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define STREAMCHARSOURCE_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#if defined(_MSC_VER) || (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || (__GNUC__ >= 4)) // GCC supports "pragma once" correctly since 3.4
#if defined(_MSC_VER) || \
(defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
(__GNUC__ >= 4)) // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif
#include "yaml-cpp/noncopyable.h"
#include <cstddef>
namespace YAML {
class StreamCharSource {
 public:
  StreamCharSource(const Stream& stream) : m_offset(0), m_stream(stream) {}
  StreamCharSource(const StreamCharSource& source)
      : m_offset(source.m_offset), m_stream(source.m_stream) {}
  ~StreamCharSource() {}

  operator bool() const;
  char operator[](std::size_t i) const {
    return m_stream.CharAt(m_offset + i);
  }
  bool operator!() const { return !static_cast<bool>(*this); }

  const StreamCharSource operator+(int i) const;

 private:
  std::size_t m_offset;
  const Stream& m_stream;

  StreamCharSource& operator=(const StreamCharSource&);  // non-assignable
};

inline StreamCharSource::operator bool() const {
  return m_stream.ReadAheadTo(m_offset);
}

inline const StreamCharSource StreamCharSource::operator+(int i) const {
  StreamCharSource source(*this);
  if (static_cast<int>(source.m_offset) + i >= 0)
    source.m_offset += i;
  else
    source.m_offset = 0;
  return source;
}
}

#endif  // STREAMCHARSOURCE_H_62B23520_7C8E_11DE_8A39_0800200C9A66
src/stringsource.h
View file @
3355bbb3
#ifndef STRINGSOURCE_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define STRINGSOURCE_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) || \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include <cstddef>

namespace YAML {
class StringCharSource {
 public:
  StringCharSource(const char* str, std::size_t size)
      : m_str(str), m_size(size), m_offset(0) {}

  operator bool() const { return m_offset < m_size; }
  char operator[](std::size_t i) const { return m_str[m_offset + i]; }
  bool operator!() const { return !static_cast<bool>(*this); }

  const StringCharSource operator+(int i) const {
    StringCharSource source(*this);
    if (static_cast<int>(source.m_offset) + i >= 0)
      source.m_offset += i;
    else
      source.m_offset = 0;
    return source;
  }

  StringCharSource& operator++() {
    ++m_offset;
    return *this;
  }

  StringCharSource& operator+=(std::size_t offset) {
    m_offset += offset;
    return *this;
  }

 private:
  const char* m_str;
  std::size_t m_size;
  std::size_t m_offset;
};
}

#endif  // STRINGSOURCE_H_62B23520_7C8E_11DE_8A39_0800200C9A66
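StringCharSource is the in-memory counterpart: the same read-only cursor interface over a (pointer, size) pair, plus ++ and += for advancing. A small, self-contained usage sketch, assuming only the class as declared above:

#include <cstddef>
#include <cstring>
#include <iostream>

int main() {
  const char* text = "key: value";
  YAML::StringCharSource source(text, std::strlen(text));

  std::size_t count = 0;
  while (source) {           // true while unread characters remain
    std::cout << source[0];  // peek at the current character
    ++source;                // advance the cursor by one
    ++count;
  }
  std::cout << "\nread " << count << " characters\n";
  return 0;
}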
src/tag.cpp
View file @
3355bbb3
#include <cassert>
#include <stdexcept>

namespace YAML {
Tag::Tag(const Token& token) : type(static_cast<TYPE>(token.data)) {
  switch (type) {
    case VERBATIM:
      value = token.value;
      break;
    case PRIMARY_HANDLE:
      value = token.value;
      break;
    case SECONDARY_HANDLE:
      value = token.value;
      break;
    case NAMED_HANDLE:
      handle = token.value;
      value = token.params[0];
      break;
    case NON_SPECIFIC:
      break;
    default:
      assert(false);
  }
}

const std::string Tag::Translate(const Directives& directives) {
  switch (type) {
    case VERBATIM:
      return value;
    case PRIMARY_HANDLE:
      return directives.TranslateTagHandle("!") + value;
    case SECONDARY_HANDLE:
      return directives.TranslateTagHandle("!!") + value;
    case NAMED_HANDLE:
      return directives.TranslateTagHandle("!" + handle + "!") + value;
    case NON_SPECIFIC:
      // TODO:
      return "!";
    default:
      assert(false);
  }
  throw std::runtime_error("yaml-cpp: internal error, bad tag type");
}
}
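Translate() resolves a shorthand tag against the document's %TAG directives: verbatim tags pass through, the primary ("!") and secondary ("!!") handles are expanded via Directives::TranslateTagHandle, and named handles are wrapped as "!name!" before expansion. As a rough illustration of the default expansions (the YAML 1.2 defaults, which %TAG directives may override; this helper is not part of the library):

#include <string>

// Illustration only (not part of the library): the default expansions that a
// tag handle falls back to when no %TAG directive overrides it, applied the
// same way Tag::Translate() applies TranslateTagHandle() above.
std::string DefaultHandlePrefix(const std::string& handle) {
  if (handle == "!")
    return "!";                   // primary handle: local tags
  if (handle == "!!")
    return "tag:yaml.org,2002:";  // secondary handle: the core schema
  return handle;                  // named handles ("!e!") need a %TAG entry
}

With these defaults, a node tagged !!str resolves to tag:yaml.org,2002:str, which matches the tags expected by the spec tests further down.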
src/tag.h
View file @
3355bbb3
#ifndef TAG_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define TAG_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) || \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include <string>

namespace YAML {
struct Token;
struct Directives;

struct Tag {
  enum TYPE {
    VERBATIM,
    PRIMARY_HANDLE,
    SECONDARY_HANDLE,
    NAMED_HANDLE,
    NON_SPECIFIC
  };

  Tag(const Token& token);
  const std::string Translate(const Directives& directives);

  TYPE type;
  std::string handle, value;
};
}

#endif  // TAG_H_62B23520_7C8E_11DE_8A39_0800200C9A66
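For orientation, an illustrative mapping (an assumption based on the Tag constructor in src/tag.cpp above, not code from the commit) of how a scanned tag populates these fields:

// Illustrative examples only:
//   "!!str"           -> type = SECONDARY_HANDLE, value = "str"
//   "!local"          -> type = PRIMARY_HANDLE,   value = "local"
//   "!e!tag"          -> type = NAMED_HANDLE, handle = "e", value = "tag"
//   "!<verbatim:uri>" -> type = VERBATIM,         value = "verbatim:uri"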
src/token.h
View file @
3355bbb3
#ifndef TOKEN_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define TOKEN_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) || \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

#include "yaml-cpp/mark.h"
#include <iostream>
#include <string>
#include <vector>

namespace YAML {
const std::string TokenNames[] = {
    "DIRECTIVE",      "DOC_START",        "DOC_END",       "BLOCK_SEQ_START",
    "BLOCK_MAP_START", "BLOCK_SEQ_END",   "BLOCK_MAP_END", "BLOCK_ENTRY",
    "FLOW_SEQ_START", "FLOW_MAP_START",   "FLOW_SEQ_END",  "FLOW_MAP_END",
    "FLOW_MAP_COMPACT", "FLOW_ENTRY",     "KEY",           "VALUE",
    "ANCHOR",         "ALIAS",            "TAG",           "SCALAR"};

struct Token {
  // enums
  enum STATUS { VALID, INVALID, UNVERIFIED };
  enum TYPE {
    DIRECTIVE,
    DOC_START,
    DOC_END,
    BLOCK_SEQ_START,
    BLOCK_MAP_START,
    BLOCK_SEQ_END,
    BLOCK_MAP_END,
    BLOCK_ENTRY,
    FLOW_SEQ_START,
    FLOW_MAP_START,
    FLOW_SEQ_END,
    FLOW_MAP_END,
    FLOW_MAP_COMPACT,
    FLOW_ENTRY,
    KEY,
    VALUE,
    ANCHOR,
    ALIAS,
    TAG,
    PLAIN_SCALAR,
    NON_PLAIN_SCALAR
  };

  // data
  Token(TYPE type_, const Mark& mark_)
      : status(VALID), type(type_), mark(mark_), data(0) {}

  friend std::ostream& operator<<(std::ostream& out, const Token& token) {
    out << TokenNames[token.type] << std::string(": ") << token.value;
    for (std::size_t i = 0; i < token.params.size(); i++)
      out << std::string(" ") << token.params[i];
    return out;
  }

  STATUS status;
  TYPE type;
  Mark mark;
  std::string value;
  std::vector<std::string> params;
  int data;
};
}

#endif  // TOKEN_H_62B23520_7C8E_11DE_8A39_0800200C9A66
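The TokenNames table exists purely for diagnostics: operator<< prints a token as its name followed by its value and any parameters. A quick sketch that dumps the table so parser traces can be matched against it (only the array declared above is assumed):

#include <cstddef>
#include <iostream>

int main() {
  // One entry per printable token kind; both scalar token types print "SCALAR".
  const std::size_t n = sizeof(YAML::TokenNames) / sizeof(YAML::TokenNames[0]);
  for (std::size_t i = 0; i < n; ++i)
    std::cout << i << ": " << YAML::TokenNames[i] << "\n";
  return 0;
}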
test/core/parsertests.cpp
View file @
3355bbb3
#include "yaml-cpp/yaml.h"
#include <iostream>

namespace Test {
namespace Parser {
TEST NoEndOfMapFlow() {
  try {
    HANDLE("---{header: {id: 1");
  } catch (const YAML::ParserException& e) {
    YAML_ASSERT(e.msg == std::string(YAML::ErrorMsg::END_OF_MAP_FLOW));
    return true;
  }
  return " no exception caught";
}

TEST PlainScalarStartingWithQuestionMark() {
  HANDLE("foo: ?bar");
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "foo");
  EXPECT_SCALAR("?", 0, "?bar");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

TEST NullStringScalar() {
  HANDLE("foo: null");
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "foo");
  EXPECT_NULL(0);
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}
}

namespace {
void RunParserTest(TEST (*test)(), const std::string& name, int& passed,
                   int& total) {
  TEST ret;
  try {
    ret = test();
  } catch (const YAML::Exception& e) {
    ret.ok = false;
    ret.error = std::string(" Exception caught: ") + e.what();
  }
  if (!ret.ok) {
    std::cout << "Parser test failed: " << name << "\n";
    std::cout << ret.error << "\n";
  }
  if (ret.ok)
    passed++;
  total++;
}
}

bool RunParserTests() {
  int passed = 0;
  int total = 0;
  RunParserTest(&Parser::NoEndOfMapFlow, "No end of map flow", passed, total);
  RunParserTest(&Parser::PlainScalarStartingWithQuestionMark,
                "Plain scalar starting with question mark", passed, total);
  RunParserTest(&Parser::NullStringScalar, "Null string scalar", passed, total);

  std::cout << "Parser tests: " << passed << "/" << total << " passed\n";
  return passed == total;
}
}
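Each parser test follows the same pattern: HANDLE() feeds a YAML snippet to the parser through the mock handler, the EXPECT_* macros assert the resulting event stream, and DONE() checks that no events remain. A hypothetical extra test in the same style (not part of this commit), here for a two-element flow sequence of plain scalars:

TEST FlowSequenceOfTwoScalars() {
  HANDLE("[foo, bar]");
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);      // untagged, unanchored sequence
  EXPECT_SCALAR("?", 0, "foo");
  EXPECT_SCALAR("?", 0, "bar");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

It would be registered the same way as the existing cases, e.g. RunParserTest(&Parser::FlowSequenceOfTwoScalars, "Flow sequence of two scalars", passed, total).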
test/core/spectests.cpp
View file @
3355bbb3
#include <cassert>

namespace Test {
namespace Spec {
// 2.1
TEST SeqScalars() {
  HANDLE(ex2_1);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "Ken Griffey");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.2
TEST MappingScalarsToScalars() {
  HANDLE(ex2_2);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "65");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.278");
  EXPECT_SCALAR("?", 0, "rbi");
  EXPECT_SCALAR("?", 0, "147");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.3
TEST MappingScalarsToSequences() {
  HANDLE(ex2_3);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "american");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Boston Red Sox");
  EXPECT_SCALAR("?", 0, "Detroit Tigers");
  EXPECT_SCALAR("?", 0, "New York Yankees");
  EXPECT_SEQ_END();
  EXPECT_SCALAR("?", 0, "national");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "New York Mets");
  EXPECT_SCALAR("?", 0, "Chicago Cubs");
  EXPECT_SCALAR("?", 0, "Atlanta Braves");
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.4
TEST SequenceOfMappings() {
  HANDLE(ex2_4);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "name");
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "65");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.278");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "name");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "63");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.288");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.5
TEST SequenceOfSequences() {
  HANDLE(ex2_5);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "name");
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SEQ_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "65");
  EXPECT_SCALAR("?", 0, "0.278");
  EXPECT_SEQ_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "63");
  EXPECT_SCALAR("?", 0, "0.288");
  EXPECT_SEQ_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.6
TEST MappingOfMappings() {
  HANDLE(ex2_6);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "65");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.278");
  EXPECT_MAP_END();
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "63");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.288");
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.7
TEST TwoDocumentsInAStream() {
  HANDLE(ex2_7);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "Ken Griffey");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Chicago Cubs");
  EXPECT_SCALAR("?", 0, "St Louis Cardinals");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.8
TEST PlayByPlayFeed() {
  HANDLE(ex2_8);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "time");
  EXPECT_SCALAR("?", 0, "20:03:20");
  EXPECT_SCALAR("?", 0, "player");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "action");
  EXPECT_SCALAR("?", 0, "strike (miss)");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "time");
  EXPECT_SCALAR("?", 0, "20:03:47");
  EXPECT_SCALAR("?", 0, "player");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "action");
  EXPECT_SCALAR("?", 0, "grand slam");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}
// 2.9
TEST
SingleDocumentWithTwoComments
()
{
HANDLE
(
ex2_9
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"hr"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"Mark McGwire"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Sammy Sosa"
);
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"rbi"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"Sammy Sosa"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Ken Griffey"
);
EXPECT_SEQ_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 2.10
TEST
SimpleAnchor
()
{
HANDLE
(
ex2_10
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"hr"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"Mark McGwire"
);
EXPECT_SCALAR
(
"?"
,
1
,
"Sammy Sosa"
);
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"rbi"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_ALIAS
(
1
);
EXPECT_SCALAR
(
"?"
,
0
,
"Ken Griffey"
);
EXPECT_SEQ_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 2.11
TEST
MappingBetweenSequences
()
{
HANDLE
(
ex2_11
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"Detroit Tigers"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Chicago cubs"
);
EXPECT_SEQ_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"2001-07-23"
);
EXPECT_SEQ_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"New York Yankees"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Atlanta Braves"
);
EXPECT_SEQ_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"2001-07-02"
);
EXPECT_SCALAR
(
"?"
,
0
,
"2001-08-12"
);
EXPECT_SCALAR
(
"?"
,
0
,
"2001-08-14"
);
EXPECT_SEQ_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 2.12
TEST
CompactNestedMapping
()
{
HANDLE
(
ex2_12
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"item"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Super Hoop"
);
EXPECT_SCALAR
(
"?"
,
0
,
"quantity"
);
EXPECT_SCALAR
(
"?"
,
0
,
"1"
);
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"item"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Basketball"
);
EXPECT_SCALAR
(
"?"
,
0
,
"quantity"
);
EXPECT_SCALAR
(
"?"
,
0
,
"4"
);
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"item"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Big Shoes"
);
EXPECT_SCALAR
(
"?"
,
0
,
"quantity"
);
EXPECT_SCALAR
(
"?"
,
0
,
"1"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 2.13
TEST
InLiteralsNewlinesArePreserved
()
{
HANDLE
(
ex2_13
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"
\\
//||
\\
/||
\n
"
"// || ||__"
);
EXPECT_DOC_END
();
DONE
();
}
// 2.14
TEST
InFoldedScalarsNewlinesBecomeSpaces
()
{
HANDLE
(
ex2_14
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"Mark McGwire's year was crippled by a knee injury."
);
EXPECT_DOC_END
();
DONE
();
}
// 2.15
TEST
FoldedNewlinesArePreservedForMoreIndentedAndBlankLines
()
{
HANDLE
(
ex2_15
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"Sammy Sosa completed another fine season with great stats.
\n
"
"
\n
"
" 63 Home Runs
\n
"
" 0.288 Batting Average
\n
"
"
\n
"
"What a year!"
);
EXPECT_DOC_END
();
DONE
();
}
// 2.16
TEST
IndentationDeterminesScope
()
{
HANDLE
(
ex2_16
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"name"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Mark McGwire"
);
EXPECT_SCALAR
(
"?"
,
0
,
"accomplishment"
);
EXPECT_SCALAR
(
"!"
,
0
,
"Mark set a major league home run record in 1998.
\n
"
);
EXPECT_SCALAR
(
"?"
,
0
,
"stats"
);
EXPECT_SCALAR
(
"!"
,
0
,
"65 Home Runs
\n
"
"0.278 Batting Average
\n
"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 2.17
TEST
QuotedScalars
()
{
HANDLE
(
ex2_17
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"unicode"
);
EXPECT_SCALAR
(
"!"
,
0
,
"Sosa did fine.\u263A"
);
EXPECT_SCALAR
(
"?"
,
0
,
"control"
);
EXPECT_SCALAR
(
"!"
,
0
,
"
\b
1998
\t
1999
\t
2000
\n
"
);
EXPECT_SCALAR
(
"?"
,
0
,
"hex esc"
);
EXPECT_SCALAR
(
"!"
,
0
,
"
\x0d\x0a
is
\r\n
"
);
EXPECT_SCALAR
(
"?"
,
0
,
"single"
);
EXPECT_SCALAR
(
"!"
,
0
,
"
\"
Howdy!
\"
he cried."
);
EXPECT_SCALAR
(
"?"
,
0
,
"quoted"
);
EXPECT_SCALAR
(
"!"
,
0
,
" # Not a 'comment'."
);
EXPECT_SCALAR
(
"?"
,
0
,
"tie-fighter"
);
EXPECT_SCALAR
(
"!"
,
0
,
"|
\\
-*-/|"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 2.18
TEST
MultiLineFlowScalars
()
{
HANDLE
(
ex2_18
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"plain"
);
EXPECT_SCALAR
(
"?"
,
0
,
"This unquoted scalar spans many lines."
);
EXPECT_SCALAR
(
"?"
,
0
,
"quoted"
);
EXPECT_SCALAR
(
"!"
,
0
,
"So does this quoted scalar.
\n
"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// TODO: 2.19 - 2.22 schema tags
// 2.23
TEST
VariousExplicitTags
()
{
HANDLE
(
ex2_23
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"not-date"
);
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
"2002-04-28"
);
EXPECT_SCALAR
(
"?"
,
0
,
"picture"
);
EXPECT_SCALAR
(
"tag:yaml.org,2002:binary"
,
0
,
"R0lGODlhDAAMAIQAAP//9/X
\n
"
"17unp5WZmZgAAAOfn515eXv
\n
"
"Pz7Y6OjuDg4J+fn5OTk6enp
\n
"
"56enmleECcgggoBADs=
\n
"
);
EXPECT_SCALAR
(
"?"
,
0
,
"application specific tag"
);
EXPECT_SCALAR
(
"!something"
,
0
,
"The semantics of the tag
\n
"
"above may be different for
\n
"
"different documents."
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 2.24
TEST
GlobalTags
()
{
HANDLE
(
ex2_24
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"tag:clarkevans.com,2002:shape"
,
0
);
EXPECT_MAP_START
(
"tag:clarkevans.com,2002:circle"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"center"
);
EXPECT_MAP_START
(
"?"
,
1
);
EXPECT_SCALAR
(
"?"
,
0
,
"x"
);
EXPECT_SCALAR
(
"?"
,
0
,
"73"
);
EXPECT_SCALAR
(
"?"
,
0
,
"y"
);
EXPECT_SCALAR
(
"?"
,
0
,
"129"
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"radius"
);
EXPECT_SCALAR
(
"?"
,
0
,
"7"
);
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"tag:clarkevans.com,2002:line"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"start"
);
EXPECT_ALIAS
(
1
);
EXPECT_SCALAR
(
"?"
,
0
,
"finish"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"x"
);
EXPECT_SCALAR
(
"?"
,
0
,
"89"
);
EXPECT_SCALAR
(
"?"
,
0
,
"y"
);
EXPECT_SCALAR
(
"?"
,
0
,
"102"
);
EXPECT_MAP_END
();
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"tag:clarkevans.com,2002:label"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"start"
);
EXPECT_ALIAS
(
1
);
EXPECT_SCALAR
(
"?"
,
0
,
"color"
);
EXPECT_SCALAR
(
"?"
,
0
,
"0xFFEEBB"
);
EXPECT_SCALAR
(
"?"
,
0
,
"text"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Pretty vector drawing."
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 2.25
TEST
UnorderedSets
()
{
HANDLE
(
ex2_25
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"tag:yaml.org,2002:set"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"Mark McGwire"
);
EXPECT_NULL
(
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"Sammy Sosa"
);
EXPECT_NULL
(
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"Ken Griffey"
);
EXPECT_NULL
(
0
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 2.26
TEST
OrderedMappings
()
{
HANDLE
(
ex2_26
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"tag:yaml.org,2002:omap"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"Mark McGwire"
);
EXPECT_SCALAR
(
"?"
,
0
,
"65"
);
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"Sammy Sosa"
);
EXPECT_SCALAR
(
"?"
,
0
,
"63"
);
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"Ken Griffey"
);
EXPECT_SCALAR
(
"?"
,
0
,
"58"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 2.27
TEST
Invoice
()
{
HANDLE
(
ex2_27
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"tag:clarkevans.com,2002:invoice"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"invoice"
);
EXPECT_SCALAR
(
"?"
,
0
,
"34843"
);
EXPECT_SCALAR
(
"?"
,
0
,
"date"
);
EXPECT_SCALAR
(
"?"
,
0
,
"2001-01-23"
);
EXPECT_SCALAR
(
"?"
,
0
,
"bill-to"
);
EXPECT_MAP_START
(
"?"
,
1
);
EXPECT_SCALAR
(
"?"
,
0
,
"given"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Chris"
);
EXPECT_SCALAR
(
"?"
,
0
,
"family"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Dumars"
);
EXPECT_SCALAR
(
"?"
,
0
,
"address"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"lines"
);
EXPECT_SCALAR
(
"!"
,
0
,
"458 Walkman Dr.
\n
"
"Suite #292
\n
"
);
EXPECT_SCALAR
(
"?"
,
0
,
"city"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Royal Oak"
);
EXPECT_SCALAR
(
"?"
,
0
,
"state"
);
EXPECT_SCALAR
(
"?"
,
0
,
"MI"
);
EXPECT_SCALAR
(
"?"
,
0
,
"postal"
);
EXPECT_SCALAR
(
"?"
,
0
,
"48046"
);
EXPECT_MAP_END
();
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"ship-to"
);
EXPECT_ALIAS
(
1
);
EXPECT_SCALAR
(
"?"
,
0
,
"product"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"sku"
);
EXPECT_SCALAR
(
"?"
,
0
,
"BL394D"
);
EXPECT_SCALAR
(
"?"
,
0
,
"quantity"
);
EXPECT_SCALAR
(
"?"
,
0
,
"4"
);
EXPECT_SCALAR
(
"?"
,
0
,
"description"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Basketball"
);
EXPECT_SCALAR
(
"?"
,
0
,
"price"
);
EXPECT_SCALAR
(
"?"
,
0
,
"450.00"
);
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"sku"
);
EXPECT_SCALAR
(
"?"
,
0
,
"BL4438H"
);
EXPECT_SCALAR
(
"?"
,
0
,
"quantity"
);
EXPECT_SCALAR
(
"?"
,
0
,
"1"
);
EXPECT_SCALAR
(
"?"
,
0
,
"description"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Super Hoop"
);
EXPECT_SCALAR
(
"?"
,
0
,
"price"
);
EXPECT_SCALAR
(
"?"
,
0
,
"2392.00"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"tax"
);
EXPECT_SCALAR
(
"?"
,
0
,
"251.42"
);
EXPECT_SCALAR
(
"?"
,
0
,
"total"
);
EXPECT_SCALAR
(
"?"
,
0
,
"4443.52"
);
EXPECT_SCALAR
(
"?"
,
0
,
"comments"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Late afternoon is best. Backup contact is Nancy Billsmer @ 338-4338."
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 2.28
TEST
LogFile
()
{
HANDLE
(
ex2_28
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"Time"
);
EXPECT_SCALAR
(
"?"
,
0
,
"2001-11-23 15:01:42 -5"
);
EXPECT_SCALAR
(
"?"
,
0
,
"User"
);
EXPECT_SCALAR
(
"?"
,
0
,
"ed"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Warning"
);
EXPECT_SCALAR
(
"?"
,
0
,
"This is an error message for the log file"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"Time"
);
EXPECT_SCALAR
(
"?"
,
0
,
"2001-11-23 15:02:31 -5"
);
EXPECT_SCALAR
(
"?"
,
0
,
"User"
);
EXPECT_SCALAR
(
"?"
,
0
,
"ed"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Warning"
);
EXPECT_SCALAR
(
"?"
,
0
,
"A slightly different error message."
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"Date"
);
EXPECT_SCALAR
(
"?"
,
0
,
"2001-11-23 15:03:17 -5"
);
EXPECT_SCALAR
(
"?"
,
0
,
"User"
);
EXPECT_SCALAR
(
"?"
,
0
,
"ed"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Fatal"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Unknown variable
\"
bar
\"
"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Stack"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"file"
);
EXPECT_SCALAR
(
"?"
,
0
,
"TopClass.py"
);
EXPECT_SCALAR
(
"?"
,
0
,
"line"
);
EXPECT_SCALAR
(
"?"
,
0
,
"23"
);
EXPECT_SCALAR
(
"?"
,
0
,
"code"
);
EXPECT_SCALAR
(
"!"
,
0
,
"x = MoreObject(
\"
345
\\
n
\"
)
\n
"
);
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"file"
);
EXPECT_SCALAR
(
"?"
,
0
,
"MoreClass.py"
);
EXPECT_SCALAR
(
"?"
,
0
,
"line"
);
EXPECT_SCALAR
(
"?"
,
0
,
"58"
);
EXPECT_SCALAR
(
"?"
,
0
,
"code"
);
EXPECT_SCALAR
(
"!"
,
0
,
"foo = bar"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// TODO: 5.1 - 5.2 BOM
// 5.3
TEST
BlockStructureIndicators
()
{
HANDLE
(
ex5_3
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"sequence"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"mapping"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"sky"
);
EXPECT_SCALAR
(
"?"
,
0
,
"blue"
);
EXPECT_SCALAR
(
"?"
,
0
,
"sea"
);
EXPECT_SCALAR
(
"?"
,
0
,
"green"
);
EXPECT_MAP_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 5.4
TEST
FlowStructureIndicators
()
{
HANDLE
(
ex5_4
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"sequence"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"mapping"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"sky"
);
EXPECT_SCALAR
(
"?"
,
0
,
"blue"
);
EXPECT_SCALAR
(
"?"
,
0
,
"sea"
);
EXPECT_SCALAR
(
"?"
,
0
,
"green"
);
EXPECT_MAP_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 5.5
TEST
CommentIndicator
()
{
HANDLE
(
ex5_5
);
DONE
();
}
// 5.6
TEST
NodePropertyIndicators
()
{
HANDLE
(
ex5_6
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"anchored"
);
EXPECT_SCALAR
(
"!local"
,
1
,
"value"
);
EXPECT_SCALAR
(
"?"
,
0
,
"alias"
);
EXPECT_ALIAS
(
1
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 5.7
TEST
BlockScalarIndicators
()
{
HANDLE
(
ex5_7
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"literal"
);
EXPECT_SCALAR
(
"!"
,
0
,
"some
\n
text
\n
"
);
EXPECT_SCALAR
(
"?"
,
0
,
"folded"
);
EXPECT_SCALAR
(
"!"
,
0
,
"some text
\n
"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 5.8
TEST
QuotedScalarIndicators
()
{
HANDLE
(
ex5_8
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"single"
);
EXPECT_SCALAR
(
"!"
,
0
,
"text"
);
EXPECT_SCALAR
(
"?"
,
0
,
"double"
);
EXPECT_SCALAR
(
"!"
,
0
,
"text"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// TODO: 5.9 directive
// TODO: 5.10 reserved indicator
// 5.11
TEST
LineBreakCharacters
()
{
HANDLE
(
ex5_11
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"Line break (no glyph)
\n
"
"Line break (glyphed)
\n
"
);
EXPECT_DOC_END
();
DONE
();
}
// 5.12
TEST
TabsAndSpaces
()
{
HANDLE
(
ex5_12
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"quoted"
);
EXPECT_SCALAR
(
"!"
,
0
,
"Quoted
\t
"
);
EXPECT_SCALAR
(
"?"
,
0
,
"block"
);
EXPECT_SCALAR
(
"!"
,
0
,
"void main() {
\n
"
"
\t
printf(
\"
Hello, world!
\\
n
\"
);
\n
"
"}"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 5.13
TEST
EscapedCharacters
()
{
HANDLE
(
ex5_13
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"Fun with
\x5C
\x22
\x07
\x08
\x1B
\x0C
\x0A
\x0D
\x09
\x0B
"
+
std
::
string
(
"
\x00
"
,
1
)
+
"
\x20
\xA0
\x85
\xe2\x80\xa8
\xe2\x80\xa9
A A A"
);
EXPECT_DOC_END
();
DONE
();
}
// 5.14
TEST
InvalidEscapedCharacters
()
{
try
{
HANDLE
(
ex5_14
);
}
catch
(
const
YAML
::
ParserException
&
e
)
{
YAML_ASSERT
(
e
.
msg
==
std
::
string
(
YAML
::
ErrorMsg
::
INVALID_ESCAPE
)
+
"c"
);
return
true
;
}
return
" no exception caught"
;
}
// 6.1
TEST
IndentationSpaces
()
{
HANDLE
(
ex6_1
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"Not indented"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"By one space"
);
EXPECT_SCALAR
(
"!"
,
0
,
"By four
\n
spaces
\n
"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Flow style"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"By two"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Also by two"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Still by two"
);
EXPECT_SEQ_END
();
EXPECT_MAP_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 6.2
TEST
IndentationIndicators
()
{
HANDLE
(
ex6_2
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"a"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"b"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"c"
);
EXPECT_SCALAR
(
"?"
,
0
,
"d"
);
EXPECT_SEQ_END
();
EXPECT_SEQ_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 6.3
TEST
SeparationSpaces
()
{
HANDLE
(
ex6_3
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"foo"
);
EXPECT_SCALAR
(
"?"
,
0
,
"bar"
);
EXPECT_MAP_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"baz"
);
EXPECT_SCALAR
(
"?"
,
0
,
"baz"
);
EXPECT_SEQ_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 6.4
TEST
LinePrefixes
()
{
HANDLE
(
ex6_4
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"plain"
);
EXPECT_SCALAR
(
"?"
,
0
,
"text lines"
);
EXPECT_SCALAR
(
"?"
,
0
,
"quoted"
);
EXPECT_SCALAR
(
"!"
,
0
,
"text lines"
);
EXPECT_SCALAR
(
"?"
,
0
,
"block"
);
EXPECT_SCALAR
(
"!"
,
0
,
"text
\n
\t
lines
\n
"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 6.5
TEST
EmptyLines
()
{
HANDLE
(
ex6_5
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"Folding"
);
EXPECT_SCALAR
(
"!"
,
0
,
"Empty line
\n
as a line feed"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Chomping"
);
EXPECT_SCALAR
(
"!"
,
0
,
"Clipped empty lines
\n
"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 6.6
TEST
LineFolding
()
{
HANDLE
(
ex6_6
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"trimmed
\n\n\n
as space"
);
EXPECT_DOC_END
();
DONE
();
}
// 6.7
TEST
BlockFolding
()
{
HANDLE
(
ex6_7
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"foo
\n\n\t
bar
\n\n
baz
\n
"
);
EXPECT_DOC_END
();
DONE
();
}
// 6.8
TEST
FlowFolding
()
{
HANDLE
(
ex6_8
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
" foo
\n
bar
\n
baz "
);
EXPECT_DOC_END
();
DONE
();
}
// 6.9
TEST
SeparatedComment
()
{
HANDLE
(
ex6_9
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"key"
);
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 6.10
TEST
CommentLines
()
{
HANDLE
(
ex6_10
);
DONE
();
}
// 6.11
TEST
MultiLineComments
()
{
HANDLE
(
ex6_11
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"key"
);
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 6.12
TEST
SeparationSpacesII
()
{
HANDLE
(
ex6_12
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"first"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Sammy"
);
EXPECT_SCALAR
(
"?"
,
0
,
"last"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Sosa"
);
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"hr"
);
EXPECT_SCALAR
(
"?"
,
0
,
"65"
);
EXPECT_SCALAR
(
"?"
,
0
,
"avg"
);
EXPECT_SCALAR
(
"?"
,
0
,
"0.278"
);
EXPECT_MAP_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 6.13
TEST
ReservedDirectives
()
{
HANDLE
(
ex6_13
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"foo"
);
EXPECT_DOC_END
();
DONE
();
}
// 6.14
TEST
YAMLDirective
()
{
HANDLE
(
ex6_14
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"foo"
);
EXPECT_DOC_END
();
DONE
();
}
// 6.15
TEST
InvalidRepeatedYAMLDirective
()
{
try
{
HANDLE
(
ex6_15
);
}
catch
(
const
YAML
::
ParserException
&
e
)
{
if
(
e
.
msg
==
YAML
::
ErrorMsg
::
REPEATED_YAML_DIRECTIVE
)
return
true
;
throw
;
}
return
" No exception was thrown"
;
}
// 6.16
TEST
TagDirective
()
{
HANDLE
(
ex6_16
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
"foo"
);
EXPECT_DOC_END
();
DONE
();
}
// 6.17
TEST
InvalidRepeatedTagDirective
()
{
try
{
HANDLE
(
ex6_17
);
}
catch
(
const
YAML
::
ParserException
&
e
)
{
if
(
e
.
msg
==
YAML
::
ErrorMsg
::
REPEATED_TAG_DIRECTIVE
)
return
true
;
throw
;
}
return
" No exception was thrown"
;
}
// 6.18
TEST
PrimaryTagHandle
()
{
HANDLE
(
ex6_18
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!foo"
,
0
,
"bar"
);
EXPECT_DOC_END
();
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"tag:example.com,2000:app/foo"
,
0
,
"bar"
);
EXPECT_DOC_END
();
DONE
();
}
// 6.19
TEST
SecondaryTagHandle
()
{
HANDLE
(
ex6_19
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"tag:example.com,2000:app/int"
,
0
,
"1 - 3"
);
EXPECT_DOC_END
();
DONE
();
}
// 6.20
TEST
TagHandles
()
{
HANDLE
(
ex6_20
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"tag:example.com,2000:app/foo"
,
0
,
"bar"
);
EXPECT_DOC_END
();
DONE
();
}
// 6.21
TEST
LocalTagPrefix
()
{
HANDLE
(
ex6_21
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!my-light"
,
0
,
"fluorescent"
);
EXPECT_DOC_END
();
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!my-light"
,
0
,
"green"
);
EXPECT_DOC_END
();
DONE
();
}
// 6.22
TEST
GlobalTagPrefix
()
{
HANDLE
(
ex6_22
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"tag:example.com,2000:app/foo"
,
0
,
"bar"
);
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 6.23
TEST
NodeProperties
()
{
HANDLE
(
ex6_23
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
1
,
"foo"
);
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
"bar"
);
EXPECT_SCALAR
(
"?"
,
2
,
"baz"
);
EXPECT_ALIAS
(
1
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 6.24
TEST
VerbatimTags
()
{
HANDLE
(
ex6_24
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
"foo"
);
EXPECT_SCALAR
(
"!bar"
,
0
,
"baz"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 6.25
TEST
InvalidVerbatimTags
()
{
HANDLE
(
ex6_25
);
return
" not implemented yet"
;
}
// 6.26
TEST
TagShorthands
()
{
HANDLE
(
ex6_26
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!local"
,
0
,
"foo"
);
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
"bar"
);
EXPECT_SCALAR
(
"tag:example.com,2000:app/tag%21"
,
0
,
"baz"
);
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 6.27
TEST
InvalidTagShorthands
()
{
bool
threw
=
false
;
try
{
HANDLE
(
ex6_27a
);
}
catch
(
const
YAML
::
ParserException
&
e
)
{
threw
=
true
;
if
(
e
.
msg
!=
YAML
::
ErrorMsg
::
TAG_WITH_NO_SUFFIX
)
throw
;
}
if
(
!
threw
)
return
" No exception was thrown for a tag with no suffix"
;
HANDLE
(
ex6_27b
);
// TODO: should we reject this one (since !h! is not declared)?
return
" not implemented yet"
;
}
// 6.28
TEST
NonSpecificTags
()
{
HANDLE
(
ex6_28
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"12"
);
EXPECT_SCALAR
(
"?"
,
0
,
"12"
);
EXPECT_SCALAR
(
"!"
,
0
,
"12"
);
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 6.29
TEST
NodeAnchors
()
{
HANDLE
(
ex6_29
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"First occurrence"
);
EXPECT_SCALAR
(
"?"
,
1
,
"Value"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Second occurrence"
);
EXPECT_ALIAS
(
1
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.1
TEST
AliasNodes
()
{
HANDLE
(
ex7_1
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"First occurrence"
);
EXPECT_SCALAR
(
"?"
,
1
,
"Foo"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Second occurrence"
);
EXPECT_ALIAS
(
1
);
EXPECT_SCALAR
(
"?"
,
0
,
"Override anchor"
);
EXPECT_SCALAR
(
"?"
,
2
,
"Bar"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Reuse anchor"
);
EXPECT_ALIAS
(
2
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.2
TEST
EmptyNodes
()
{
HANDLE
(
ex7_2
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"foo"
);
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
""
);
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
""
);
EXPECT_SCALAR
(
"?"
,
0
,
"bar"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.3
TEST
CompletelyEmptyNodes
()
{
HANDLE
(
ex7_3
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"foo"
);
EXPECT_NULL
(
0
);
EXPECT_NULL
(
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"bar"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.4
TEST
DoubleQuotedImplicitKeys
()
{
HANDLE
(
ex7_4
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"implicit block key"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"implicit flow key"
);
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.5
TEST
DoubleQuotedLineBreaks
()
{
HANDLE
(
ex7_5
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"folded to a space,
\n
to a line feed, or
\t
\t
non-content"
);
EXPECT_DOC_END
();
DONE
();
}
// 7.6
TEST
DoubleQuotedLines
()
{
HANDLE
(
ex7_6
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
" 1st non-empty
\n
2nd non-empty 3rd non-empty "
);
EXPECT_DOC_END
();
DONE
();
}
// 7.7
TEST
SingleQuotedCharacters
()
{
HANDLE
(
ex7_7
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"here's to
\"
quotes
\"
"
);
EXPECT_DOC_END
();
DONE
();
}
// 7.8
TEST
SingleQuotedImplicitKeys
()
{
HANDLE
(
ex7_8
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"implicit block key"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"implicit flow key"
);
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.9
TEST
SingleQuotedLines
()
{
HANDLE
(
ex7_9
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
" 1st non-empty
\n
2nd non-empty 3rd non-empty "
);
EXPECT_DOC_END
();
DONE
();
}
// 7.10
TEST
PlainCharacters
()
{
HANDLE
(
ex7_10
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"::vector"
);
EXPECT_SCALAR
(
"!"
,
0
,
": - ()"
);
EXPECT_SCALAR
(
"?"
,
0
,
"Up, up, and away!"
);
EXPECT_SCALAR
(
"?"
,
0
,
"-123"
);
EXPECT_SCALAR
(
"?"
,
0
,
"http://example.com/foo#bar"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"::vector"
);
EXPECT_SCALAR
(
"!"
,
0
,
": - ()"
);
EXPECT_SCALAR
(
"!"
,
0
,
"Up, up, and away!"
);
EXPECT_SCALAR
(
"?"
,
0
,
"-123"
);
EXPECT_SCALAR
(
"?"
,
0
,
"http://example.com/foo#bar"
);
EXPECT_SEQ_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.11
TEST
PlainImplicitKeys
()
{
HANDLE
(
ex7_11
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"implicit block key"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"implicit flow key"
);
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.12
TEST
PlainLines
()
{
HANDLE
(
ex7_12
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"?"
,
0
,
"1st non-empty
\n
2nd non-empty 3rd non-empty"
);
EXPECT_DOC_END
();
DONE
();
}
// 7.13
TEST
FlowSequence
()
{
HANDLE
(
ex7_13
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
EXPECT_SEQ_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"three"
);
EXPECT_SCALAR
(
"?"
,
0
,
"four"
);
EXPECT_SEQ_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.14
TEST
FlowSequenceEntries
()
{
HANDLE
(
ex7_14
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"double quoted"
);
EXPECT_SCALAR
(
"!"
,
0
,
"single quoted"
);
EXPECT_SCALAR
(
"?"
,
0
,
"plain text"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"nested"
);
EXPECT_SEQ_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"single"
);
EXPECT_SCALAR
(
"?"
,
0
,
"pair"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.15
TEST
FlowMappings
()
{
HANDLE
(
ex7_15
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
EXPECT_SCALAR
(
"?"
,
0
,
"three"
);
EXPECT_SCALAR
(
"?"
,
0
,
"four"
);
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"five"
);
EXPECT_SCALAR
(
"?"
,
0
,
"six"
);
EXPECT_SCALAR
(
"?"
,
0
,
"seven"
);
EXPECT_SCALAR
(
"?"
,
0
,
"eight"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.16
TEST
FlowMappingEntries
()
{
HANDLE
(
ex7_16
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"explicit"
);
EXPECT_SCALAR
(
"?"
,
0
,
"entry"
);
EXPECT_SCALAR
(
"?"
,
0
,
"implicit"
);
EXPECT_SCALAR
(
"?"
,
0
,
"entry"
);
EXPECT_NULL
(
0
);
EXPECT_NULL
(
0
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.17
TEST
FlowMappingSeparateValues
()
{
HANDLE
(
ex7_17
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"unquoted"
);
EXPECT_SCALAR
(
"!"
,
0
,
"separate"
);
EXPECT_SCALAR
(
"?"
,
0
,
"http://foo.com"
);
EXPECT_NULL
(
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"omitted value"
);
EXPECT_NULL
(
0
);
EXPECT_NULL
(
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"omitted key"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.18
TEST
FlowMappingAdjacentValues
()
{
HANDLE
(
ex7_18
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"adjacent"
);
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
EXPECT_SCALAR
(
"!"
,
0
,
"readable"
);
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
EXPECT_SCALAR
(
"!"
,
0
,
"empty"
);
EXPECT_NULL
(
0
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.19
TEST
SinglePairFlowMappings
()
{
HANDLE
(
ex7_19
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"foo"
);
EXPECT_SCALAR
(
"?"
,
0
,
"bar"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.20
TEST
SinglePairExplicitEntry
()
{
HANDLE
(
ex7_20
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"foo bar"
);
EXPECT_SCALAR
(
"?"
,
0
,
"baz"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.21
TEST
SinglePairImplicitEntries
()
{
HANDLE
(
ex7_21
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"YAML"
);
EXPECT_SCALAR
(
"?"
,
0
,
"separate"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_NULL
(
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"empty key entry"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"JSON"
);
EXPECT_SCALAR
(
"?"
,
0
,
"like"
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"adjacent"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.22
TEST
InvalidImplicitKeys
()
{
try
{
HANDLE
(
ex7_22
);
}
catch
(
const
YAML
::
Exception
&
e
)
{
if
(
e
.
msg
==
YAML
::
ErrorMsg
::
END_OF_SEQ_FLOW
)
return
true
;
throw
;
}
return
" no exception thrown"
;
}
// 7.23
TEST
FlowContent
()
{
HANDLE
(
ex7_23
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"a"
);
EXPECT_SCALAR
(
"?"
,
0
,
"b"
);
EXPECT_SEQ_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"a"
);
EXPECT_SCALAR
(
"?"
,
0
,
"b"
);
EXPECT_MAP_END
();
EXPECT_SCALAR
(
"!"
,
0
,
"a"
);
EXPECT_SCALAR
(
"!"
,
0
,
"b"
);
EXPECT_SCALAR
(
"?"
,
0
,
"c"
);
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 7.24
TEST
FlowNodes
()
{
HANDLE
(
ex7_24
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
"a"
);
EXPECT_SCALAR
(
"!"
,
0
,
"b"
);
EXPECT_SCALAR
(
"!"
,
1
,
"c"
);
EXPECT_ALIAS
(
1
);
EXPECT_SCALAR
(
"tag:yaml.org,2002:str"
,
0
,
""
);
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.1
TEST
BlockScalarHeader
()
{
HANDLE
(
ex8_1
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"literal
\n
"
);
EXPECT_SCALAR
(
"!"
,
0
,
" folded
\n
"
);
EXPECT_SCALAR
(
"!"
,
0
,
"keep
\n\n
"
);
EXPECT_SCALAR
(
"!"
,
0
,
" strip"
);
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.2
TEST
BlockIndentationHeader
()
{
HANDLE
(
ex8_2
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"detected
\n
"
);
EXPECT_SCALAR
(
"!"
,
0
,
"
\n\n
# detected
\n
"
);
EXPECT_SCALAR
(
"!"
,
0
,
" explicit
\n
"
);
EXPECT_SCALAR
(
"!"
,
0
,
"
\t\n
detected
\n
"
);
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.3
TEST
InvalidBlockScalarIndentationIndicators
()
{
{
bool
threw
=
false
;
try
{
HANDLE
(
ex8_3a
);
}
catch
(
const
YAML
::
Exception
&
e
)
{
if
(
e
.
msg
!=
YAML
::
ErrorMsg
::
END_OF_SEQ
)
throw
;
threw
=
true
;
}
if
(
!
threw
)
return
" no exception thrown for less indented auto-detecting indentation for a literal block scalar"
;
}
{
bool
threw
=
false
;
try
{
HANDLE
(
ex8_3b
);
}
catch
(
const
YAML
::
Exception
&
e
)
{
if
(
e
.
msg
!=
YAML
::
ErrorMsg
::
END_OF_SEQ
)
throw
;
threw
=
true
;
}
if
(
!
threw
)
return
" no exception thrown for less indented auto-detecting indentation for a folded block scalar"
;
}
{
bool
threw
=
false
;
try
{
HANDLE
(
ex8_3c
);
}
catch
(
const
YAML
::
Exception
&
e
)
{
if
(
e
.
msg
!=
YAML
::
ErrorMsg
::
END_OF_SEQ
)
throw
;
threw
=
true
;
}
if
(
!
threw
)
return
" no exception thrown for less indented explicit indentation for a literal block scalar"
;
}
return
true
;
}
// 8.4
TEST
ChompingFinalLineBreak
()
{
HANDLE
(
ex8_4
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"strip"
);
EXPECT_SCALAR
(
"!"
,
0
,
"text"
);
EXPECT_SCALAR
(
"?"
,
0
,
"clip"
);
EXPECT_SCALAR
(
"!"
,
0
,
"text
\n
"
);
EXPECT_SCALAR
(
"?"
,
0
,
"keep"
);
EXPECT_SCALAR
(
"!"
,
0
,
"text
\n
"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.5
TEST
ChompingTrailingLines
()
{
HANDLE
(
ex8_5
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"strip"
);
EXPECT_SCALAR
(
"!"
,
0
,
"# text"
);
EXPECT_SCALAR
(
"?"
,
0
,
"clip"
);
EXPECT_SCALAR
(
"!"
,
0
,
"# text
\n
"
);
EXPECT_SCALAR
(
"?"
,
0
,
"keep"
);
EXPECT_SCALAR
(
"!"
,
0
,
"# text
\n
"
);
// Note: I believe this is a bug in the YAML spec - it should be "# text\n\n"
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.6
TEST
EmptyScalarChomping
()
{
HANDLE
(
ex8_6
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"strip"
);
EXPECT_SCALAR
(
"!"
,
0
,
""
);
EXPECT_SCALAR
(
"?"
,
0
,
"clip"
);
EXPECT_SCALAR
(
"!"
,
0
,
""
);
EXPECT_SCALAR
(
"?"
,
0
,
"keep"
);
EXPECT_SCALAR
(
"!"
,
0
,
"
\n
"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.7
TEST
LiteralScalar
()
{
HANDLE
(
ex8_7
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"literal
\n\t
text
\n
"
);
EXPECT_DOC_END
();
DONE
();
}
// 8.8
TEST
LiteralContent
()
{
HANDLE
(
ex8_8
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"
\n\n
literal
\n
\n\n
text
\n
"
);
EXPECT_DOC_END
();
DONE
();
}
// 8.9
TEST
FoldedScalar
()
{
HANDLE
(
ex8_9
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"folded text
\n
"
);
EXPECT_DOC_END
();
DONE
();
}
// 8.10
TEST
FoldedLines
()
{
HANDLE
(
ex8_10
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"
\n
folded line
\n
next line
\n
* bullet
\n\n
* list
\n
* lines
\n\n
last line
\n
"
);
EXPECT_DOC_END
();
DONE
();
}
// 8.11
TEST
MoreIndentedLines
()
{
HANDLE
(
ex8_11
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"
\n
folded line
\n
next line
\n
* bullet
\n\n
* list
\n
* lines
\n\n
last line
\n
"
);
EXPECT_DOC_END
();
DONE
();
}
// 8.12
TEST
EmptySeparationLines
()
{
HANDLE
(
ex8_12
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"
\n
folded line
\n
next line
\n
* bullet
\n\n
* list
\n
* lines
\n\n
last line
\n
"
);
EXPECT_DOC_END
();
DONE
();
}
// 8.13
TEST
FinalEmptyLines
()
{
HANDLE
(
ex8_13
);
EXPECT_DOC_START
();
EXPECT_SCALAR
(
"!"
,
0
,
"
\n
folded line
\n
next line
\n
* bullet
\n\n
* list
\n
* lines
\n\n
last line
\n
"
);
EXPECT_DOC_END
();
DONE
();
}
// 8.14
TEST
BlockSequence
()
{
HANDLE
(
ex8_14
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"block sequence"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
EXPECT_SCALAR
(
"?"
,
0
,
"three"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.15
TEST
BlockSequenceEntryTypes
()
{
HANDLE
(
ex8_15
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_NULL
(
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"block node
\n
"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
EXPECT_SEQ_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.16
TEST
BlockMappings
()
{
HANDLE
(
ex8_16
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"block mapping"
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"key"
);
EXPECT_SCALAR
(
"?"
,
0
,
"value"
);
EXPECT_MAP_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.17
TEST
ExplicitBlockMappingEntries
()
{
HANDLE
(
ex8_17
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"explicit key"
);
EXPECT_NULL
(
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"block key
\n
"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"one"
);
EXPECT_SCALAR
(
"?"
,
0
,
"two"
);
EXPECT_SEQ_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.18
TEST
ImplicitBlockMappingEntries
()
{
HANDLE
(
ex8_18
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"plain key"
);
EXPECT_SCALAR
(
"?"
,
0
,
"in-line value"
);
EXPECT_NULL
(
0
);
EXPECT_NULL
(
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"quoted key"
);
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"entry"
);
EXPECT_SEQ_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.19
TEST
CompactBlockMappings
()
{
HANDLE
(
ex8_19
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"sun"
);
EXPECT_SCALAR
(
"?"
,
0
,
"yellow"
);
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"earth"
);
EXPECT_SCALAR
(
"?"
,
0
,
"blue"
);
EXPECT_MAP_END
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"moon"
);
EXPECT_SCALAR
(
"?"
,
0
,
"white"
);
EXPECT_MAP_END
();
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.20
TEST
BlockNodeTypes
()
{
HANDLE
(
ex8_20
);
EXPECT_DOC_START
();
EXPECT_SEQ_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"!"
,
0
,
"flow in block"
);
EXPECT_SCALAR
(
"!"
,
0
,
"Block scalar
\n
"
);
EXPECT_MAP_START
(
"tag:yaml.org,2002:map"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"foo"
);
EXPECT_SCALAR
(
"?"
,
0
,
"bar"
);
EXPECT_MAP_END
();
EXPECT_SEQ_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.21
TEST
BlockScalarNodes
()
{
HANDLE
(
ex8_21
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"literal"
);
EXPECT_SCALAR
(
"!"
,
0
,
"value"
);
// Note: I believe this is a bug in the YAML spec - it should be "value\n"
EXPECT_SCALAR
(
"?"
,
0
,
"folded"
);
EXPECT_SCALAR
(
"!foo"
,
0
,
"value"
);
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
// 8.22
TEST
BlockCollectionNodes
()
{
HANDLE
(
ex8_22
);
EXPECT_DOC_START
();
EXPECT_MAP_START
(
"?"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"sequence"
);
EXPECT_SEQ_START
(
"tag:yaml.org,2002:seq"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"entry"
);
EXPECT_SEQ_START
(
"tag:yaml.org,2002:seq"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"nested"
);
EXPECT_SEQ_END
();
EXPECT_SEQ_END
();
EXPECT_SCALAR
(
"?"
,
0
,
"mapping"
);
EXPECT_MAP_START
(
"tag:yaml.org,2002:map"
,
0
);
EXPECT_SCALAR
(
"?"
,
0
,
"foo"
);
EXPECT_SCALAR
(
"?"
,
0
,
"bar"
);
EXPECT_MAP_END
();
EXPECT_MAP_END
();
EXPECT_DOC_END
();
DONE
();
}
}
namespace
Spec
{
// 2.1
TEST SeqScalars() {
  HANDLE(ex2_1);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "Ken Griffey");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.2
TEST MappingScalarsToScalars() {
  HANDLE(ex2_2);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "65");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.278");
  EXPECT_SCALAR("?", 0, "rbi");
  EXPECT_SCALAR("?", 0, "147");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.3
TEST MappingScalarsToSequences() {
  HANDLE(ex2_3);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "american");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Boston Red Sox");
  EXPECT_SCALAR("?", 0, "Detroit Tigers");
  EXPECT_SCALAR("?", 0, "New York Yankees");
  EXPECT_SEQ_END();
  EXPECT_SCALAR("?", 0, "national");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "New York Mets");
  EXPECT_SCALAR("?", 0, "Chicago Cubs");
  EXPECT_SCALAR("?", 0, "Atlanta Braves");
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.4
TEST SequenceOfMappings() {
  HANDLE(ex2_4);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "name");
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "65");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.278");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "name");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "63");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.288");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.5
TEST SequenceOfSequences() {
  HANDLE(ex2_5);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "name");
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SEQ_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "65");
  EXPECT_SCALAR("?", 0, "0.278");
  EXPECT_SEQ_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "63");
  EXPECT_SCALAR("?", 0, "0.288");
  EXPECT_SEQ_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.6
TEST MappingOfMappings() {
  HANDLE(ex2_6);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "65");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.278");
  EXPECT_MAP_END();
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "63");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.288");
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.7
TEST TwoDocumentsInAStream() {
  HANDLE(ex2_7);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "Ken Griffey");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Chicago Cubs");
  EXPECT_SCALAR("?", 0, "St Louis Cardinals");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.8
TEST PlayByPlayFeed() {
  HANDLE(ex2_8);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "time");
  EXPECT_SCALAR("?", 0, "20:03:20");
  EXPECT_SCALAR("?", 0, "player");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "action");
  EXPECT_SCALAR("?", 0, "strike (miss)");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "time");
  EXPECT_SCALAR("?", 0, "20:03:47");
  EXPECT_SCALAR("?", 0, "player");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "action");
  EXPECT_SCALAR("?", 0, "grand slam");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.9
TEST SingleDocumentWithTwoComments() {
  HANDLE(ex2_9);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SEQ_END();
  EXPECT_SCALAR("?", 0, "rbi");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "Ken Griffey");
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}
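// Anchors and aliases below follow the same inferred convention: a nonzero
// anchor argument (1, 2, ...) marks the node that declares the anchor, and
// EXPECT_ALIAS(n) expects a later alias event that refers back to that id.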
// 2.10
TEST SimpleAnchor() {
  HANDLE(ex2_10);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 1, "Sammy Sosa");
  EXPECT_SEQ_END();
  EXPECT_SCALAR("?", 0, "rbi");
  EXPECT_SEQ_START("?", 0);
  EXPECT_ALIAS(1);
  EXPECT_SCALAR("?", 0, "Ken Griffey");
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.11
TEST MappingBetweenSequences() {
  HANDLE(ex2_11);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "Detroit Tigers");
  EXPECT_SCALAR("?", 0, "Chicago cubs");
  EXPECT_SEQ_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "2001-07-23");
  EXPECT_SEQ_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "New York Yankees");
  EXPECT_SCALAR("?", 0, "Atlanta Braves");
  EXPECT_SEQ_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "2001-07-02");
  EXPECT_SCALAR("?", 0, "2001-08-12");
  EXPECT_SCALAR("?", 0, "2001-08-14");
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.12
TEST CompactNestedMapping() {
  HANDLE(ex2_12);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "item");
  EXPECT_SCALAR("?", 0, "Super Hoop");
  EXPECT_SCALAR("?", 0, "quantity");
  EXPECT_SCALAR("?", 0, "1");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "item");
  EXPECT_SCALAR("?", 0, "Basketball");
  EXPECT_SCALAR("?", 0, "quantity");
  EXPECT_SCALAR("?", 0, "4");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "item");
  EXPECT_SCALAR("?", 0, "Big Shoes");
  EXPECT_SCALAR("?", 0, "quantity");
  EXPECT_SCALAR("?", 0, "1");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.13
TEST InLiteralsNewlinesArePreserved() {
  HANDLE(ex2_13);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "\\//||\\/||\n" "// || ||__");
  EXPECT_DOC_END();
  DONE();
}

// 2.14
TEST InFoldedScalarsNewlinesBecomeSpaces() {
  HANDLE(ex2_14);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "Mark McGwire's year was crippled by a knee injury.");
  EXPECT_DOC_END();
  DONE();
}

// 2.15
TEST FoldedNewlinesArePreservedForMoreIndentedAndBlankLines() {
  HANDLE(ex2_15);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0,
                "Sammy Sosa completed another fine season with great stats.\n"
                "\n"
                " 63 Home Runs\n"
                " 0.288 Batting Average\n"
                "\n"
                "What a year!");
  EXPECT_DOC_END();
  DONE();
}

// 2.16
TEST IndentationDeterminesScope() {
  HANDLE(ex2_16);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "name");
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "accomplishment");
  EXPECT_SCALAR("!", 0, "Mark set a major league home run record in 1998.\n");
  EXPECT_SCALAR("?", 0, "stats");
  EXPECT_SCALAR("!", 0, "65 Home Runs\n" "0.278 Batting Average\n");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.17
TEST QuotedScalars() {
  HANDLE(ex2_17);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "unicode");
  EXPECT_SCALAR("!", 0, "Sosa did fine.\u263A");
  EXPECT_SCALAR("?", 0, "control");
  EXPECT_SCALAR("!", 0, "\b1998\t1999\t2000\n");
  EXPECT_SCALAR("?", 0, "hex esc");
  EXPECT_SCALAR("!", 0, "\x0d\x0a is \r\n");
  EXPECT_SCALAR("?", 0, "single");
  EXPECT_SCALAR("!", 0, "\"Howdy!\" he cried.");
  EXPECT_SCALAR("?", 0, "quoted");
  EXPECT_SCALAR("!", 0, " # Not a 'comment'.");
  EXPECT_SCALAR("?", 0, "tie-fighter");
  EXPECT_SCALAR("!", 0, "|\\-*-/|");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.18
TEST MultiLineFlowScalars() {
  HANDLE(ex2_18);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "plain");
  EXPECT_SCALAR("?", 0, "This unquoted scalar spans many lines.");
  EXPECT_SCALAR("?", 0, "quoted");
  EXPECT_SCALAR("!", 0, "So does this quoted scalar.\n");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// TODO: 2.19 - 2.22 schema tags
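// The explicit-tag tests below expect shorthand tags such as !!str and
// !!binary to arrive already resolved to their full "tag:yaml.org,2002:..."
// form, while application tags (e.g. !something) remain local.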
// 2.23
TEST VariousExplicitTags() {
  HANDLE(ex2_23);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "not-date");
  EXPECT_SCALAR("tag:yaml.org,2002:str", 0, "2002-04-28");
  EXPECT_SCALAR("?", 0, "picture");
  EXPECT_SCALAR("tag:yaml.org,2002:binary", 0,
                "R0lGODlhDAAMAIQAAP//9/X\n"
                "17unp5WZmZgAAAOfn515eXv\n"
                "Pz7Y6OjuDg4J+fn5OTk6enp\n"
                "56enmleECcgggoBADs=\n");
  EXPECT_SCALAR("?", 0, "application specific tag");
  EXPECT_SCALAR("!something", 0,
                "The semantics of the tag\n"
                "above may be different for\n"
                "different documents.");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.24
TEST GlobalTags() {
  HANDLE(ex2_24);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("tag:clarkevans.com,2002:shape", 0);
  EXPECT_MAP_START("tag:clarkevans.com,2002:circle", 0);
  EXPECT_SCALAR("?", 0, "center");
  EXPECT_MAP_START("?", 1);
  EXPECT_SCALAR("?", 0, "x");
  EXPECT_SCALAR("?", 0, "73");
  EXPECT_SCALAR("?", 0, "y");
  EXPECT_SCALAR("?", 0, "129");
  EXPECT_MAP_END();
  EXPECT_SCALAR("?", 0, "radius");
  EXPECT_SCALAR("?", 0, "7");
  EXPECT_MAP_END();
  EXPECT_MAP_START("tag:clarkevans.com,2002:line", 0);
  EXPECT_SCALAR("?", 0, "start");
  EXPECT_ALIAS(1);
  EXPECT_SCALAR("?", 0, "finish");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "x");
  EXPECT_SCALAR("?", 0, "89");
  EXPECT_SCALAR("?", 0, "y");
  EXPECT_SCALAR("?", 0, "102");
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_MAP_START("tag:clarkevans.com,2002:label", 0);
  EXPECT_SCALAR("?", 0, "start");
  EXPECT_ALIAS(1);
  EXPECT_SCALAR("?", 0, "color");
  EXPECT_SCALAR("?", 0, "0xFFEEBB");
  EXPECT_SCALAR("?", 0, "text");
  EXPECT_SCALAR("?", 0, "Pretty vector drawing.");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.25
TEST UnorderedSets() {
  HANDLE(ex2_25);
  EXPECT_DOC_START();
  EXPECT_MAP_START("tag:yaml.org,2002:set", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_NULL(0);
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_NULL(0);
  EXPECT_SCALAR("?", 0, "Ken Griffey");
  EXPECT_NULL(0);
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.26
TEST OrderedMappings() {
  HANDLE(ex2_26);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("tag:yaml.org,2002:omap", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Mark McGwire");
  EXPECT_SCALAR("?", 0, "65");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Sammy Sosa");
  EXPECT_SCALAR("?", 0, "63");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Ken Griffey");
  EXPECT_SCALAR("?", 0, "58");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.27
TEST Invoice() {
  HANDLE(ex2_27);
  EXPECT_DOC_START();
  EXPECT_MAP_START("tag:clarkevans.com,2002:invoice", 0);
  EXPECT_SCALAR("?", 0, "invoice");
  EXPECT_SCALAR("?", 0, "34843");
  EXPECT_SCALAR("?", 0, "date");
  EXPECT_SCALAR("?", 0, "2001-01-23");
  EXPECT_SCALAR("?", 0, "bill-to");
  EXPECT_MAP_START("?", 1);
  EXPECT_SCALAR("?", 0, "given");
  EXPECT_SCALAR("?", 0, "Chris");
  EXPECT_SCALAR("?", 0, "family");
  EXPECT_SCALAR("?", 0, "Dumars");
  EXPECT_SCALAR("?", 0, "address");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "lines");
  EXPECT_SCALAR("!", 0, "458 Walkman Dr.\n" "Suite #292\n");
  EXPECT_SCALAR("?", 0, "city");
  EXPECT_SCALAR("?", 0, "Royal Oak");
  EXPECT_SCALAR("?", 0, "state");
  EXPECT_SCALAR("?", 0, "MI");
  EXPECT_SCALAR("?", 0, "postal");
  EXPECT_SCALAR("?", 0, "48046");
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_SCALAR("?", 0, "ship-to");
  EXPECT_ALIAS(1);
  EXPECT_SCALAR("?", 0, "product");
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "sku");
  EXPECT_SCALAR("?", 0, "BL394D");
  EXPECT_SCALAR("?", 0, "quantity");
  EXPECT_SCALAR("?", 0, "4");
  EXPECT_SCALAR("?", 0, "description");
  EXPECT_SCALAR("?", 0, "Basketball");
  EXPECT_SCALAR("?", 0, "price");
  EXPECT_SCALAR("?", 0, "450.00");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "sku");
  EXPECT_SCALAR("?", 0, "BL4438H");
  EXPECT_SCALAR("?", 0, "quantity");
  EXPECT_SCALAR("?", 0, "1");
  EXPECT_SCALAR("?", 0, "description");
  EXPECT_SCALAR("?", 0, "Super Hoop");
  EXPECT_SCALAR("?", 0, "price");
  EXPECT_SCALAR("?", 0, "2392.00");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_SCALAR("?", 0, "tax");
  EXPECT_SCALAR("?", 0, "251.42");
  EXPECT_SCALAR("?", 0, "total");
  EXPECT_SCALAR("?", 0, "4443.52");
  EXPECT_SCALAR("?", 0, "comments");
  EXPECT_SCALAR("?", 0,
                "Late afternoon is best. Backup contact is Nancy Billsmer @ 338-4338.");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 2.28
TEST LogFile() {
  HANDLE(ex2_28);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Time");
  EXPECT_SCALAR("?", 0, "2001-11-23 15:01:42 -5");
  EXPECT_SCALAR("?", 0, "User");
  EXPECT_SCALAR("?", 0, "ed");
  EXPECT_SCALAR("?", 0, "Warning");
  EXPECT_SCALAR("?", 0, "This is an error message for the log file");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Time");
  EXPECT_SCALAR("?", 0, "2001-11-23 15:02:31 -5");
  EXPECT_SCALAR("?", 0, "User");
  EXPECT_SCALAR("?", 0, "ed");
  EXPECT_SCALAR("?", 0, "Warning");
  EXPECT_SCALAR("?", 0, "A slightly different error message.");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Date");
  EXPECT_SCALAR("?", 0, "2001-11-23 15:03:17 -5");
  EXPECT_SCALAR("?", 0, "User");
  EXPECT_SCALAR("?", 0, "ed");
  EXPECT_SCALAR("?", 0, "Fatal");
  EXPECT_SCALAR("?", 0, "Unknown variable \"bar\"");
  EXPECT_SCALAR("?", 0, "Stack");
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "file");
  EXPECT_SCALAR("?", 0, "TopClass.py");
  EXPECT_SCALAR("?", 0, "line");
  EXPECT_SCALAR("?", 0, "23");
  EXPECT_SCALAR("?", 0, "code");
  EXPECT_SCALAR("!", 0, "x = MoreObject(\"345\\n\")\n");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "file");
  EXPECT_SCALAR("?", 0, "MoreClass.py");
  EXPECT_SCALAR("?", 0, "line");
  EXPECT_SCALAR("?", 0, "58");
  EXPECT_SCALAR("?", 0, "code");
  EXPECT_SCALAR("!", 0, "foo = bar");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// TODO: 5.1 - 5.2 BOM
// 5.3
TEST BlockStructureIndicators() {
  HANDLE(ex5_3);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "sequence");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "one");
  EXPECT_SCALAR("?", 0, "two");
  EXPECT_SEQ_END();
  EXPECT_SCALAR("?", 0, "mapping");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "sky");
  EXPECT_SCALAR("?", 0, "blue");
  EXPECT_SCALAR("?", 0, "sea");
  EXPECT_SCALAR("?", 0, "green");
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 5.4
TEST FlowStructureIndicators() {
  HANDLE(ex5_4);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "sequence");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "one");
  EXPECT_SCALAR("?", 0, "two");
  EXPECT_SEQ_END();
  EXPECT_SCALAR("?", 0, "mapping");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "sky");
  EXPECT_SCALAR("?", 0, "blue");
  EXPECT_SCALAR("?", 0, "sea");
  EXPECT_SCALAR("?", 0, "green");
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 5.5
TEST CommentIndicator() {
  HANDLE(ex5_5);
  DONE();
}

// 5.6
TEST NodePropertyIndicators() {
  HANDLE(ex5_6);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "anchored");
  EXPECT_SCALAR("!local", 1, "value");
  EXPECT_SCALAR("?", 0, "alias");
  EXPECT_ALIAS(1);
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 5.7
TEST BlockScalarIndicators() {
  HANDLE(ex5_7);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "literal");
  EXPECT_SCALAR("!", 0, "some\ntext\n");
  EXPECT_SCALAR("?", 0, "folded");
  EXPECT_SCALAR("!", 0, "some text\n");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 5.8
TEST QuotedScalarIndicators() {
  HANDLE(ex5_8);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "single");
  EXPECT_SCALAR("!", 0, "text");
  EXPECT_SCALAR("?", 0, "double");
  EXPECT_SCALAR("!", 0, "text");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// TODO: 5.9 directive
// TODO: 5.10 reserved indicator
// 5.11
TEST LineBreakCharacters() {
  HANDLE(ex5_11);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "Line break (no glyph)\n" "Line break (glyphed)\n");
  EXPECT_DOC_END();
  DONE();
}

// 5.12
TEST TabsAndSpaces() {
  HANDLE(ex5_12);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "quoted");
  EXPECT_SCALAR("!", 0, "Quoted\t");
  EXPECT_SCALAR("?", 0, "block");
  EXPECT_SCALAR("!", 0,
                "void main() {\n"
                "\tprintf(\"Hello, world!\\n\");\n"
                "}");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 5.13
TEST EscapedCharacters() {
  HANDLE(ex5_13);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0,
                "Fun with \x5C \x22 \x07 \x08 \x1B \x0C \x0A \x0D \x09 \x0B " +
                    std::string("\x00", 1) +
                    " \x20 \xA0 \x85 \xe2\x80\xa8 \xe2\x80\xa9 A A A");
  EXPECT_DOC_END();
  DONE();
}
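// Several of the remaining tests exercise error handling; the convention
// (inferred from usage) is to run HANDLE(...) inside a try block, return true
// when the expected YAML exception is caught, and otherwise return a string
// describing the missing failure, which the harness presumably reports.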
// 5.14
TEST InvalidEscapedCharacters() {
  try {
    HANDLE(ex5_14);
  } catch (const YAML::ParserException& e) {
    YAML_ASSERT(e.msg == std::string(YAML::ErrorMsg::INVALID_ESCAPE) + "c");
    return true;
  }
  return " no exception caught";
}

// 6.1
TEST IndentationSpaces() {
  HANDLE(ex6_1);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Not indented");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "By one space");
  EXPECT_SCALAR("!", 0, "By four\nspaces\n");
  EXPECT_SCALAR("?", 0, "Flow style");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "By two");
  EXPECT_SCALAR("?", 0, "Also by two");
  EXPECT_SCALAR("?", 0, "Still by two");
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.2
TEST IndentationIndicators() {
  HANDLE(ex6_2);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "a");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "b");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "c");
  EXPECT_SCALAR("?", 0, "d");
  EXPECT_SEQ_END();
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.3
TEST SeparationSpaces() {
  HANDLE(ex6_3);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "foo");
  EXPECT_SCALAR("?", 0, "bar");
  EXPECT_MAP_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "baz");
  EXPECT_SCALAR("?", 0, "baz");
  EXPECT_SEQ_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.4
TEST LinePrefixes() {
  HANDLE(ex6_4);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "plain");
  EXPECT_SCALAR("?", 0, "text lines");
  EXPECT_SCALAR("?", 0, "quoted");
  EXPECT_SCALAR("!", 0, "text lines");
  EXPECT_SCALAR("?", 0, "block");
  EXPECT_SCALAR("!", 0, "text\n\tlines\n");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.5
TEST EmptyLines() {
  HANDLE(ex6_5);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "Folding");
  EXPECT_SCALAR("!", 0, "Empty line\nas a line feed");
  EXPECT_SCALAR("?", 0, "Chomping");
  EXPECT_SCALAR("!", 0, "Clipped empty lines\n");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.6
TEST LineFolding() {
  HANDLE(ex6_6);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "trimmed\n\n\nas space");
  EXPECT_DOC_END();
  DONE();
}

// 6.7
TEST BlockFolding() {
  HANDLE(ex6_7);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "foo\n\n\tbar\n\nbaz\n");
  EXPECT_DOC_END();
  DONE();
}

// 6.8
TEST FlowFolding() {
  HANDLE(ex6_8);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, " foo\nbar\nbaz ");
  EXPECT_DOC_END();
  DONE();
}

// 6.9
TEST SeparatedComment() {
  HANDLE(ex6_9);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "key");
  EXPECT_SCALAR("?", 0, "value");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.10
TEST CommentLines() {
  HANDLE(ex6_10);
  DONE();
}

// 6.11
TEST MultiLineComments() {
  HANDLE(ex6_11);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "key");
  EXPECT_SCALAR("?", 0, "value");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.12
TEST SeparationSpacesII() {
  HANDLE(ex6_12);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "first");
  EXPECT_SCALAR("?", 0, "Sammy");
  EXPECT_SCALAR("?", 0, "last");
  EXPECT_SCALAR("?", 0, "Sosa");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "hr");
  EXPECT_SCALAR("?", 0, "65");
  EXPECT_SCALAR("?", 0, "avg");
  EXPECT_SCALAR("?", 0, "0.278");
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.13
TEST ReservedDirectives() {
  HANDLE(ex6_13);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "foo");
  EXPECT_DOC_END();
  DONE();
}

// 6.14
TEST YAMLDirective() {
  HANDLE(ex6_14);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "foo");
  EXPECT_DOC_END();
  DONE();
}

// 6.15
TEST InvalidRepeatedYAMLDirective() {
  try {
    HANDLE(ex6_15);
  } catch (const YAML::ParserException& e) {
    if (e.msg == YAML::ErrorMsg::REPEATED_YAML_DIRECTIVE)
      return true;
    throw;
  }
  return " No exception was thrown";
}

// 6.16
TEST TagDirective() {
  HANDLE(ex6_16);
  EXPECT_DOC_START();
  EXPECT_SCALAR("tag:yaml.org,2002:str", 0, "foo");
  EXPECT_DOC_END();
  DONE();
}

// 6.17
TEST InvalidRepeatedTagDirective() {
  try {
    HANDLE(ex6_17);
  } catch (const YAML::ParserException& e) {
    if (e.msg == YAML::ErrorMsg::REPEATED_TAG_DIRECTIVE)
      return true;
    throw;
  }
  return " No exception was thrown";
}

// 6.18
TEST PrimaryTagHandle() {
  HANDLE(ex6_18);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!foo", 0, "bar");
  EXPECT_DOC_END();
  EXPECT_DOC_START();
  EXPECT_SCALAR("tag:example.com,2000:app/foo", 0, "bar");
  EXPECT_DOC_END();
  DONE();
}

// 6.19
TEST SecondaryTagHandle() {
  HANDLE(ex6_19);
  EXPECT_DOC_START();
  EXPECT_SCALAR("tag:example.com,2000:app/int", 0, "1 - 3");
  EXPECT_DOC_END();
  DONE();
}

// 6.20
TEST TagHandles() {
  HANDLE(ex6_20);
  EXPECT_DOC_START();
  EXPECT_SCALAR("tag:example.com,2000:app/foo", 0, "bar");
  EXPECT_DOC_END();
  DONE();
}

// 6.21
TEST LocalTagPrefix() {
  HANDLE(ex6_21);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!my-light", 0, "fluorescent");
  EXPECT_DOC_END();
  EXPECT_DOC_START();
  EXPECT_SCALAR("!my-light", 0, "green");
  EXPECT_DOC_END();
  DONE();
}

// 6.22
TEST GlobalTagPrefix() {
  HANDLE(ex6_22);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("tag:example.com,2000:app/foo", 0, "bar");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.23
TEST NodeProperties() {
  HANDLE(ex6_23);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("tag:yaml.org,2002:str", 1, "foo");
  EXPECT_SCALAR("tag:yaml.org,2002:str", 0, "bar");
  EXPECT_SCALAR("?", 2, "baz");
  EXPECT_ALIAS(1);
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.24
TEST VerbatimTags() {
  HANDLE(ex6_24);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("tag:yaml.org,2002:str", 0, "foo");
  EXPECT_SCALAR("!bar", 0, "baz");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.25
TEST InvalidVerbatimTags() {
  HANDLE(ex6_25);
  return " not implemented yet";
}

// 6.26
TEST TagShorthands() {
  HANDLE(ex6_26);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("!local", 0, "foo");
  EXPECT_SCALAR("tag:yaml.org,2002:str", 0, "bar");
  EXPECT_SCALAR("tag:example.com,2000:app/tag%21", 0, "baz");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 6.27
TEST InvalidTagShorthands() {
  bool threw = false;
  try {
    HANDLE(ex6_27a);
  } catch (const YAML::ParserException& e) {
    threw = true;
    if (e.msg != YAML::ErrorMsg::TAG_WITH_NO_SUFFIX)
      throw;
  }
  if (!threw)
    return " No exception was thrown for a tag with no suffix";

  HANDLE(ex6_27b);
  // TODO: should we reject this one (since !h! is not declared)?
  return " not implemented yet";
}

// 6.28
TEST NonSpecificTags() {
  HANDLE(ex6_28);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("!", 0, "12");
  EXPECT_SCALAR("?", 0, "12");
  EXPECT_SCALAR("!", 0, "12");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}
// 6.29
TEST NodeAnchors() {
  HANDLE(ex6_29);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "First occurrence");
  EXPECT_SCALAR("?", 1, "Value");
  EXPECT_SCALAR("?", 0, "Second occurrence");
  EXPECT_ALIAS(1);
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.1
TEST AliasNodes() {
  HANDLE(ex7_1);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "First occurrence");
  EXPECT_SCALAR("?", 1, "Foo");
  EXPECT_SCALAR("?", 0, "Second occurrence");
  EXPECT_ALIAS(1);
  EXPECT_SCALAR("?", 0, "Override anchor");
  EXPECT_SCALAR("?", 2, "Bar");
  EXPECT_SCALAR("?", 0, "Reuse anchor");
  EXPECT_ALIAS(2);
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.2
TEST EmptyNodes() {
  HANDLE(ex7_2);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "foo");
  EXPECT_SCALAR("tag:yaml.org,2002:str", 0, "");
  EXPECT_SCALAR("tag:yaml.org,2002:str", 0, "");
  EXPECT_SCALAR("?", 0, "bar");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.3
TEST CompletelyEmptyNodes() {
  HANDLE(ex7_3);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "foo");
  EXPECT_NULL(0);
  EXPECT_NULL(0);
  EXPECT_SCALAR("?", 0, "bar");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.4
TEST DoubleQuotedImplicitKeys() {
  HANDLE(ex7_4);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("!", 0, "implicit block key");
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("!", 0, "implicit flow key");
  EXPECT_SCALAR("?", 0, "value");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.5
TEST DoubleQuotedLineBreaks() {
  HANDLE(ex7_5);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "folded to a space,\nto a line feed, or\t\tnon-content");
  EXPECT_DOC_END();
  DONE();
}

// 7.6
TEST DoubleQuotedLines() {
  HANDLE(ex7_6);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, " 1st non-empty\n2nd non-empty 3rd non-empty ");
  EXPECT_DOC_END();
  DONE();
}

// 7.7
TEST SingleQuotedCharacters() {
  HANDLE(ex7_7);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "here's to \"quotes\"");
  EXPECT_DOC_END();
  DONE();
}

// 7.8
TEST SingleQuotedImplicitKeys() {
  HANDLE(ex7_8);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("!", 0, "implicit block key");
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("!", 0, "implicit flow key");
  EXPECT_SCALAR("?", 0, "value");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.9
TEST SingleQuotedLines() {
  HANDLE(ex7_9);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, " 1st non-empty\n2nd non-empty 3rd non-empty ");
  EXPECT_DOC_END();
  DONE();
}

// 7.10
TEST PlainCharacters() {
  HANDLE(ex7_10);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "::vector");
  EXPECT_SCALAR("!", 0, ": - ()");
  EXPECT_SCALAR("?", 0, "Up, up, and away!");
  EXPECT_SCALAR("?", 0, "-123");
  EXPECT_SCALAR("?", 0, "http://example.com/foo#bar");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "::vector");
  EXPECT_SCALAR("!", 0, ": - ()");
  EXPECT_SCALAR("!", 0, "Up, up, and away!");
  EXPECT_SCALAR("?", 0, "-123");
  EXPECT_SCALAR("?", 0, "http://example.com/foo#bar");
  EXPECT_SEQ_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.11
TEST PlainImplicitKeys() {
  HANDLE(ex7_11);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "implicit block key");
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "implicit flow key");
  EXPECT_SCALAR("?", 0, "value");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.12
TEST PlainLines() {
  HANDLE(ex7_12);
  EXPECT_DOC_START();
  EXPECT_SCALAR("?", 0, "1st non-empty\n2nd non-empty 3rd non-empty");
  EXPECT_DOC_END();
  DONE();
}

// 7.13
TEST FlowSequence() {
  HANDLE(ex7_13);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "one");
  EXPECT_SCALAR("?", 0, "two");
  EXPECT_SEQ_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "three");
  EXPECT_SCALAR("?", 0, "four");
  EXPECT_SEQ_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.14
TEST FlowSequenceEntries() {
  HANDLE(ex7_14);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("!", 0, "double quoted");
  EXPECT_SCALAR("!", 0, "single quoted");
  EXPECT_SCALAR("?", 0, "plain text");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "nested");
  EXPECT_SEQ_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "single");
  EXPECT_SCALAR("?", 0, "pair");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.15
TEST FlowMappings() {
  HANDLE(ex7_15);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "one");
  EXPECT_SCALAR("?", 0, "two");
  EXPECT_SCALAR("?", 0, "three");
  EXPECT_SCALAR("?", 0, "four");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "five");
  EXPECT_SCALAR("?", 0, "six");
  EXPECT_SCALAR("?", 0, "seven");
  EXPECT_SCALAR("?", 0, "eight");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.16
TEST FlowMappingEntries() {
  HANDLE(ex7_16);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "explicit");
  EXPECT_SCALAR("?", 0, "entry");
  EXPECT_SCALAR("?", 0, "implicit");
  EXPECT_SCALAR("?", 0, "entry");
  EXPECT_NULL(0);
  EXPECT_NULL(0);
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.17
TEST FlowMappingSeparateValues() {
  HANDLE(ex7_17);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "unquoted");
  EXPECT_SCALAR("!", 0, "separate");
  EXPECT_SCALAR("?", 0, "http://foo.com");
  EXPECT_NULL(0);
  EXPECT_SCALAR("?", 0, "omitted value");
  EXPECT_NULL(0);
  EXPECT_NULL(0);
  EXPECT_SCALAR("?", 0, "omitted key");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.18
TEST FlowMappingAdjacentValues() {
  HANDLE(ex7_18);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("!", 0, "adjacent");
  EXPECT_SCALAR("?", 0, "value");
  EXPECT_SCALAR("!", 0, "readable");
  EXPECT_SCALAR("?", 0, "value");
  EXPECT_SCALAR("!", 0, "empty");
  EXPECT_NULL(0);
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.19
TEST SinglePairFlowMappings() {
  HANDLE(ex7_19);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "foo");
  EXPECT_SCALAR("?", 0, "bar");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.20
TEST SinglePairExplicitEntry() {
  HANDLE(ex7_20);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "foo bar");
  EXPECT_SCALAR("?", 0, "baz");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.21
TEST SinglePairImplicitEntries() {
  HANDLE(ex7_21);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "YAML");
  EXPECT_SCALAR("?", 0, "separate");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_NULL(0);
  EXPECT_SCALAR("?", 0, "empty key entry");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "JSON");
  EXPECT_SCALAR("?", 0, "like");
  EXPECT_MAP_END();
  EXPECT_SCALAR("?", 0, "adjacent");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.22
TEST InvalidImplicitKeys() {
  try {
    HANDLE(ex7_22);
  } catch (const YAML::Exception& e) {
    if (e.msg == YAML::ErrorMsg::END_OF_SEQ_FLOW)
      return true;
    throw;
  }
  return " no exception thrown";
}

// 7.23
TEST FlowContent() {
  HANDLE(ex7_23);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "a");
  EXPECT_SCALAR("?", 0, "b");
  EXPECT_SEQ_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "a");
  EXPECT_SCALAR("?", 0, "b");
  EXPECT_MAP_END();
  EXPECT_SCALAR("!", 0, "a");
  EXPECT_SCALAR("!", 0, "b");
  EXPECT_SCALAR("?", 0, "c");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 7.24
TEST FlowNodes() {
  HANDLE(ex7_24);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("tag:yaml.org,2002:str", 0, "a");
  EXPECT_SCALAR("!", 0, "b");
  EXPECT_SCALAR("!", 1, "c");
  EXPECT_ALIAS(1);
  EXPECT_SCALAR("tag:yaml.org,2002:str", 0, "");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 8.1
TEST BlockScalarHeader() {
  HANDLE(ex8_1);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("!", 0, "literal\n");
  EXPECT_SCALAR("!", 0, " folded\n");
  EXPECT_SCALAR("!", 0, "keep\n\n");
  EXPECT_SCALAR("!", 0, " strip");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 8.2
TEST BlockIndentationHeader() {
  HANDLE(ex8_2);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("!", 0, "detected\n");
  EXPECT_SCALAR("!", 0, "\n\n# detected\n");
  EXPECT_SCALAR("!", 0, " explicit\n");
  EXPECT_SCALAR("!", 0, "\t\ndetected\n");
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 8.3
TEST InvalidBlockScalarIndentationIndicators() {
  {
    bool threw = false;
    try {
      HANDLE(ex8_3a);
    } catch (const YAML::Exception& e) {
      if (e.msg != YAML::ErrorMsg::END_OF_SEQ)
        throw;
      threw = true;
    }
    if (!threw)
      return " no exception thrown for less indented auto-detecting "
             "indentation for a literal block scalar";
  }
  {
    bool threw = false;
    try {
      HANDLE(ex8_3b);
    } catch (const YAML::Exception& e) {
      if (e.msg != YAML::ErrorMsg::END_OF_SEQ)
        throw;
      threw = true;
    }
    if (!threw)
      return " no exception thrown for less indented auto-detecting "
             "indentation for a folded block scalar";
  }
  {
    bool threw = false;
    try {
      HANDLE(ex8_3c);
    } catch (const YAML::Exception& e) {
      if (e.msg != YAML::ErrorMsg::END_OF_SEQ)
        throw;
      threw = true;
    }
    if (!threw)
      return " no exception thrown for less indented explicit indentation for "
             "a literal block scalar";
  }
  return true;
}
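// Examples 8.4 - 8.6 cover block scalar chomping: '-' (strip) drops the final
// line break, no indicator (clip) keeps exactly one, and '+' (keep) preserves
// all trailing line breaks, which is what the expected values below reflect.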
// 8.4
TEST ChompingFinalLineBreak() {
  HANDLE(ex8_4);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "strip");
  EXPECT_SCALAR("!", 0, "text");
  EXPECT_SCALAR("?", 0, "clip");
  EXPECT_SCALAR("!", 0, "text\n");
  EXPECT_SCALAR("?", 0, "keep");
  EXPECT_SCALAR("!", 0, "text\n");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 8.5
TEST ChompingTrailingLines() {
  HANDLE(ex8_5);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "strip");
  EXPECT_SCALAR("!", 0, "# text");
  EXPECT_SCALAR("?", 0, "clip");
  EXPECT_SCALAR("!", 0, "# text\n");
  EXPECT_SCALAR("?", 0, "keep");
  EXPECT_SCALAR("!", 0, "# text\n");
  // Note: I believe this is a bug in the
  // YAML spec - it should be "# text\n\n"
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 8.6
TEST EmptyScalarChomping() {
  HANDLE(ex8_6);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "strip");
  EXPECT_SCALAR("!", 0, "");
  EXPECT_SCALAR("?", 0, "clip");
  EXPECT_SCALAR("!", 0, "");
  EXPECT_SCALAR("?", 0, "keep");
  EXPECT_SCALAR("!", 0, "\n");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 8.7
TEST LiteralScalar() {
  HANDLE(ex8_7);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "literal\n\ttext\n");
  EXPECT_DOC_END();
  DONE();
}

// 8.8
TEST LiteralContent() {
  HANDLE(ex8_8);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "\n\nliteral\n\n\ntext\n");
  EXPECT_DOC_END();
  DONE();
}

// 8.9
TEST FoldedScalar() {
  HANDLE(ex8_9);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0, "folded text\n");
  EXPECT_DOC_END();
  DONE();
}

// 8.10
TEST FoldedLines() {
  HANDLE(ex8_10);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0,
                "\nfolded line\nnext line\n* bullet\n\n* list\n* "
                "lines\n\nlast line\n");
  EXPECT_DOC_END();
  DONE();
}

// 8.11
TEST MoreIndentedLines() {
  HANDLE(ex8_11);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0,
                "\nfolded line\nnext line\n* bullet\n\n* list\n* "
                "lines\n\nlast line\n");
  EXPECT_DOC_END();
  DONE();
}

// 8.12
TEST EmptySeparationLines() {
  HANDLE(ex8_12);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0,
                "\nfolded line\nnext line\n* bullet\n\n* list\n* "
                "lines\n\nlast line\n");
  EXPECT_DOC_END();
  DONE();
}

// 8.13
TEST FinalEmptyLines() {
  HANDLE(ex8_13);
  EXPECT_DOC_START();
  EXPECT_SCALAR("!", 0,
                "\nfolded line\nnext line\n* bullet\n\n* list\n* "
                "lines\n\nlast line\n");
  EXPECT_DOC_END();
  DONE();
}

// 8.14
TEST BlockSequence() {
  HANDLE(ex8_14);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "block sequence");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "one");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "two");
  EXPECT_SCALAR("?", 0, "three");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 8.15
TEST BlockSequenceEntryTypes() {
  HANDLE(ex8_15);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_NULL(0);
  EXPECT_SCALAR("!", 0, "block node\n");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "one");
  EXPECT_SCALAR("?", 0, "two");
  EXPECT_SEQ_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "one");
  EXPECT_SCALAR("?", 0, "two");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 8.16
TEST BlockMappings() {
  HANDLE(ex8_16);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "block mapping");
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "key");
  EXPECT_SCALAR("?", 0, "value");
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 8.17
TEST ExplicitBlockMappingEntries() {
  HANDLE(ex8_17);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "explicit key");
  EXPECT_NULL(0);
  EXPECT_SCALAR("!", 0, "block key\n");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "one");
  EXPECT_SCALAR("?", 0, "two");
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 8.18
TEST ImplicitBlockMappingEntries() {
  HANDLE(ex8_18);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "plain key");
  EXPECT_SCALAR("?", 0, "in-line value");
  EXPECT_NULL(0);
  EXPECT_NULL(0);
  EXPECT_SCALAR("!", 0, "quoted key");
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("?", 0, "entry");
  EXPECT_SEQ_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 8.19
TEST CompactBlockMappings() {
  HANDLE(ex8_19);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "sun");
  EXPECT_SCALAR("?", 0, "yellow");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "earth");
  EXPECT_SCALAR("?", 0, "blue");
  EXPECT_MAP_END();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "moon");
  EXPECT_SCALAR("?", 0, "white");
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 8.20
TEST BlockNodeTypes() {
  HANDLE(ex8_20);
  EXPECT_DOC_START();
  EXPECT_SEQ_START("?", 0);
  EXPECT_SCALAR("!", 0, "flow in block");
  EXPECT_SCALAR("!", 0, "Block scalar\n");
  EXPECT_MAP_START("tag:yaml.org,2002:map", 0);
  EXPECT_SCALAR("?", 0, "foo");
  EXPECT_SCALAR("?", 0, "bar");
  EXPECT_MAP_END();
  EXPECT_SEQ_END();
  EXPECT_DOC_END();
  DONE();
}

// 8.21
TEST BlockScalarNodes() {
  HANDLE(ex8_21);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "literal");
  EXPECT_SCALAR("!", 0, "value");
  // Note: I believe this is a bug in the YAML
  // spec - it should be "value\n"
  EXPECT_SCALAR("?", 0, "folded");
  EXPECT_SCALAR("!foo", 0, "value");
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}

// 8.22
TEST BlockCollectionNodes() {
  HANDLE(ex8_22);
  EXPECT_DOC_START();
  EXPECT_MAP_START("?", 0);
  EXPECT_SCALAR("?", 0, "sequence");
  EXPECT_SEQ_START("tag:yaml.org,2002:seq", 0);
  EXPECT_SCALAR("?", 0, "entry");
  EXPECT_SEQ_START("tag:yaml.org,2002:seq", 0);
  EXPECT_SCALAR("?", 0, "nested");
  EXPECT_SEQ_END();
  EXPECT_SEQ_END();
  EXPECT_SCALAR("?", 0, "mapping");
  EXPECT_MAP_START("tag:yaml.org,2002:map", 0);
  EXPECT_SCALAR("?", 0, "foo");
  EXPECT_SCALAR("?", 0, "bar");
  EXPECT_MAP_END();
  EXPECT_MAP_END();
  EXPECT_DOC_END();
  DONE();
}
}
}
test/emittertests.h
View file @ 3355bbb3

#ifndef EMITTERTESTS_H_62B23520_7C8E_11DE_8A39_0800200C9A66
#define EMITTERTESTS_H_62B23520_7C8E_11DE_8A39_0800200C9A66

#if defined(_MSC_VER) || (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#if defined(_MSC_VER) ||                                            \
    (defined(__GNUC__) && (__GNUC__ == 3 && __GNUC_MINOR__ >= 4) || \
     (__GNUC__ >= 4))  // GCC supports "pragma once" correctly since 3.4
#pragma once
#endif

namespace Test {
bool RunEmitterTests();
}

#endif  // EMITTERTESTS_H_62B23520_7C8E_11DE_8A39_0800200C9A66