Moved token enums into Token scope

Author: Jesse Beder
Date:   2009-09-05 02:28:11 +00:00
Parent: 1db573dd30
Commit: 19673ff01a

8 changed files with 86 additions and 84 deletions
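In short: the namespace-level TOKEN_STATUS/TOKEN_TYPE enums become nested enums of the Token struct, so call sites drop the TS_/TT_ prefixes and qualify the constants as Token::VALID, Token::KEY, and so on. A minimal sketch of the before/after pattern, condensed from the diff below (the Mark struct here is only a stand-in for yaml-cpp's real source-position type, and the enum lists are abbreviated):

struct Mark {};  // stand-in for yaml-cpp's source-position type

// Before: free enums in namespace YAML, prefixed to avoid name collisions.
enum TOKEN_STATUS { TS_VALID, TS_INVALID, TS_UNVERIFIED };
enum TOKEN_TYPE { TT_KEY, TT_VALUE, TT_SCALAR /* , ... */ };

// After: the same enums scoped inside Token, so the prefixes go away
// and callers write Token::VALID, Token::KEY, Token::TYPE, etc.
struct Token {
	enum STATUS { VALID, INVALID, UNVERIFIED };
	enum TYPE { KEY, VALUE, SCALAR /* , ... */ };

	Token(TYPE type_, const Mark& mark_): status(VALID), type(type_), mark(mark_) {}

	STATUS status;
	TYPE type;
	Mark mark;
	// value, params, ... as before
};

// Call sites change mechanically, e.g.:
//   if(token.type == TT_KEY)               ->  if(token.type == Token::KEY)
//   m_tokens.push(Token(TT_SCALAR, mark))  ->  m_tokens.push(Token(Token::SCALAR, mark))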


@@ -59,8 +59,8 @@ namespace YAML
 	// split based on start token
 	switch(pScanner->peek().type) {
-		case TT_BLOCK_MAP_START: ParseBlock(pScanner, state); break;
-		case TT_FLOW_MAP_START: ParseFlow(pScanner, state); break;
+		case Token::BLOCK_MAP_START: ParseBlock(pScanner, state); break;
+		case Token::FLOW_MAP_START: ParseFlow(pScanner, state); break;
 		default: break;
 	}
 }
@@ -75,10 +75,10 @@ namespace YAML
 		throw ParserException(Mark::null(), ErrorMsg::END_OF_MAP);
 	Token token = pScanner->peek();
-	if(token.type != TT_KEY && token.type != TT_VALUE && token.type != TT_BLOCK_MAP_END)
+	if(token.type != Token::KEY && token.type != Token::VALUE && token.type != Token::BLOCK_MAP_END)
 		throw ParserException(token.mark, ErrorMsg::END_OF_MAP);
-	if(token.type == TT_BLOCK_MAP_END) {
+	if(token.type == Token::BLOCK_MAP_END) {
 		pScanner->pop();
 		break;
 	}
@@ -86,13 +86,13 @@ namespace YAML
 	std::auto_ptr <Node> pKey(new Node), pValue(new Node);
 	// grab key (if non-null)
-	if(token.type == TT_KEY) {
+	if(token.type == Token::KEY) {
 		pScanner->pop();
 		pKey->Parse(pScanner, state);
 	}
 	// now grab value (optional)
-	if(!pScanner->empty() && pScanner->peek().type == TT_VALUE) {
+	if(!pScanner->empty() && pScanner->peek().type == Token::VALUE) {
 		pScanner->pop();
 		pValue->Parse(pScanner, state);
 	}
@@ -113,13 +113,13 @@ namespace YAML
 	Token& token = pScanner->peek();
 	// first check for end
-	if(token.type == TT_FLOW_MAP_END) {
+	if(token.type == Token::FLOW_MAP_END) {
 		pScanner->pop();
 		break;
 	}
 	// now it better be a key
-	if(token.type != TT_KEY)
+	if(token.type != Token::KEY)
 		throw ParserException(token.mark, ErrorMsg::END_OF_MAP_FLOW);
 	pScanner->pop();
@@ -130,16 +130,16 @@ namespace YAML
 	pKey->Parse(pScanner, state);
 	// now grab value (optional)
-	if(!pScanner->empty() && pScanner->peek().type == TT_VALUE) {
+	if(!pScanner->empty() && pScanner->peek().type == Token::VALUE) {
 		pScanner->pop();
 		pValue->Parse(pScanner, state);
 	}
 	// now eat the separator (or could be a map end, which we ignore - but if it's neither, then it's a bad node)
 	Token& nextToken = pScanner->peek();
-	if(nextToken.type == TT_FLOW_ENTRY)
+	if(nextToken.type == Token::FLOW_ENTRY)
 		pScanner->pop();
-	else if(nextToken.type != TT_FLOW_MAP_END)
+	else if(nextToken.type != Token::FLOW_MAP_END)
 		throw ParserException(nextToken.mark, ErrorMsg::END_OF_MAP_FLOW);
 	// assign the map with the actual pointers


@@ -88,15 +88,15 @@ namespace YAML
 	// now split based on what kind of node we should be
 	switch(pScanner->peek().type) {
-		case TT_SCALAR:
+		case Token::SCALAR:
 			m_pContent = new Scalar;
 			break;
-		case TT_FLOW_SEQ_START:
-		case TT_BLOCK_SEQ_START:
+		case Token::FLOW_SEQ_START:
+		case Token::BLOCK_SEQ_START:
 			m_pContent = new Sequence;
 			break;
-		case TT_FLOW_MAP_START:
-		case TT_BLOCK_MAP_START:
+		case Token::FLOW_MAP_START:
+		case Token::BLOCK_MAP_START:
 			m_pContent = new Map;
 			break;
 		default:
@@ -124,9 +124,9 @@ namespace YAML
 		return;
 	switch(pScanner->peek().type) {
-		case TT_TAG: ParseTag(pScanner, state); break;
-		case TT_ANCHOR: ParseAnchor(pScanner, state); break;
-		case TT_ALIAS: ParseAlias(pScanner, state); break;
+		case Token::TAG: ParseTag(pScanner, state); break;
+		case Token::ANCHOR: ParseAnchor(pScanner, state); break;
+		case Token::ALIAS: ParseAlias(pScanner, state); break;
 		default: return;
 	}
 }


@@ -46,14 +46,14 @@ namespace YAML
 		return;
 	// first eat doc start (optional)
-	if(m_pScanner->peek().type == TT_DOC_START)
+	if(m_pScanner->peek().type == Token::DOC_START)
 		m_pScanner->pop();
 	// now parse our root node
 	document.Parse(m_pScanner, m_state);
 	// and finally eat any doc ends we see
-	while(!m_pScanner->empty() && m_pScanner->peek().type == TT_DOC_END)
+	while(!m_pScanner->empty() && m_pScanner->peek().type == Token::DOC_END)
 		m_pScanner->pop();
 	// clear anchors from the scanner, which are no longer relevant
@@ -71,7 +71,7 @@ namespace YAML
 		break;
 	Token& token = m_pScanner->peek();
-	if(token.type != TT_DIRECTIVE)
+	if(token.type != Token::DIRECTIVE)
 		break;
 	// we keep the directives from the last document if none are specified;


@@ -31,7 +31,7 @@ namespace YAML
 	EnsureTokensInQueue();
 	if(!m_tokens.empty()) {
 		// Saved anchors shouldn't survive popping the document end marker
-		if (m_tokens.front().type == TT_DOC_END) {
+		if (m_tokens.front().type == Token::DOC_END) {
 			ClearAnchors();
 		}
 		m_tokens.pop();
@@ -60,11 +60,11 @@ namespace YAML
 	Token& token = m_tokens.front();
 	// if this guy's valid, then we're done
-	if(token.status == TS_VALID)
+	if(token.status == Token::VALID)
 		return;
 	// here's where we clean up the impossible tokens
-	if(token.status == TS_INVALID) {
+	if(token.status == Token::INVALID) {
 		m_tokens.pop();
 		continue;
 	}
@@ -263,9 +263,9 @@ namespace YAML
 	// now push
 	m_indents.push(indent);
 	if(type == IndentMarker::SEQ)
-		m_tokens.push(Token(TT_BLOCK_SEQ_START, INPUT.mark()));
+		m_tokens.push(Token(Token::BLOCK_SEQ_START, INPUT.mark()));
 	else if(type == IndentMarker::MAP)
-		m_tokens.push(Token(TT_BLOCK_MAP_START, INPUT.mark()));
+		m_tokens.push(Token(Token::BLOCK_MAP_START, INPUT.mark()));
 	else
 		assert(false);
@@ -319,9 +319,9 @@ namespace YAML
 	IndentMarker::INDENT_TYPE type = m_indents.top().type;
 	m_indents.pop();
 	if(type == IndentMarker::SEQ)
-		m_tokens.push(Token(TT_BLOCK_SEQ_END, INPUT.mark()));
+		m_tokens.push(Token(Token::BLOCK_SEQ_END, INPUT.mark()));
 	else if(type == IndentMarker::MAP)
-		m_tokens.push(Token(TT_BLOCK_MAP_END, INPUT.mark()));
+		m_tokens.push(Token(Token::BLOCK_MAP_END, INPUT.mark()));
 }
 // GetTopIndent


@@ -50,7 +50,7 @@ namespace YAML
 		params.push_back(param);
 	}
-	Token token(TT_DIRECTIVE, mark);
+	Token token(Token::DIRECTIVE, mark);
 	token.value = name;
 	token.params = params;
 	m_tokens.push(token);
@@ -66,7 +66,7 @@ namespace YAML
 	// eat
 	Mark mark = INPUT.mark();
 	INPUT.eat(3);
-	m_tokens.push(Token(TT_DOC_START, mark));
+	m_tokens.push(Token(Token::DOC_START, mark));
 }
 // DocEnd
@@ -79,7 +79,7 @@ namespace YAML
 	// eat
 	Mark mark = INPUT.mark();
 	INPUT.eat(3);
-	m_tokens.push(Token(TT_DOC_END, mark));
+	m_tokens.push(Token(Token::DOC_END, mark));
 }
 // FlowStart
@@ -93,7 +93,7 @@ namespace YAML
 	// eat
 	Mark mark = INPUT.mark();
 	char ch = INPUT.get();
-	TOKEN_TYPE type = (ch == Keys::FlowSeqStart ? TT_FLOW_SEQ_START : TT_FLOW_MAP_START);
+	Token::TYPE type = (ch == Keys::FlowSeqStart ? Token::FLOW_SEQ_START : Token::FLOW_MAP_START);
 	m_tokens.push(Token(type, mark));
 }
@@ -109,7 +109,7 @@ namespace YAML
 	// eat
 	Mark mark = INPUT.mark();
 	char ch = INPUT.get();
-	TOKEN_TYPE type = (ch == Keys::FlowSeqEnd ? TT_FLOW_SEQ_END : TT_FLOW_MAP_END);
+	Token::TYPE type = (ch == Keys::FlowSeqEnd ? Token::FLOW_SEQ_END : Token::FLOW_MAP_END);
 	m_tokens.push(Token(type, mark));
 }
@@ -121,7 +121,7 @@ namespace YAML
 	// eat
 	Mark mark = INPUT.mark();
 	INPUT.eat(1);
-	m_tokens.push(Token(TT_FLOW_ENTRY, mark));
+	m_tokens.push(Token(Token::FLOW_ENTRY, mark));
 }
 // BlockEntry
@@ -141,7 +141,7 @@ namespace YAML
 	// eat
 	Mark mark = INPUT.mark();
 	INPUT.eat(1);
-	m_tokens.push(Token(TT_BLOCK_ENTRY, mark));
+	m_tokens.push(Token(Token::BLOCK_ENTRY, mark));
 }
 // Key
@@ -164,7 +164,7 @@ namespace YAML
 	// eat
 	Mark mark = INPUT.mark();
 	INPUT.eat(1);
-	m_tokens.push(Token(TT_KEY, mark));
+	m_tokens.push(Token(Token::KEY, mark));
 }
 // Value
@@ -199,7 +199,7 @@ namespace YAML
 	// eat
 	Mark mark = INPUT.mark();
 	INPUT.eat(1);
-	m_tokens.push(Token(TT_VALUE, mark));
+	m_tokens.push(Token(Token::VALUE, mark));
 }
 // AnchorOrAlias
@@ -231,7 +231,7 @@ namespace YAML
 		throw ParserException(INPUT.mark(), alias ? ErrorMsg::CHAR_IN_ALIAS : ErrorMsg::CHAR_IN_ANCHOR);
 	// and we're done
-	Token token(alias ? TT_ALIAS : TT_ANCHOR, mark);
+	Token token(alias ? Token::ALIAS : Token::ANCHOR, mark);
 	token.value = name;
 	m_tokens.push(token);
 }
@@ -268,7 +268,7 @@ namespace YAML
 		handle = "!";
 	}
-	Token token(TT_TAG, mark);
+	Token token(Token::TAG, mark);
 	token.value = handle;
 	token.params.push_back(suffix);
 	m_tokens.push(token);
@@ -305,7 +305,7 @@ namespace YAML
 	//if(Exp::IllegalCharInScalar.Matches(INPUT))
 	//	throw ParserException(INPUT.mark(), ErrorMsg::CHAR_IN_SCALAR);
-	Token token(TT_SCALAR, mark);
+	Token token(Token::SCALAR, mark);
 	token.value = scalar;
 	m_tokens.push(token);
 }
@@ -344,7 +344,7 @@ namespace YAML
 	scalar = ScanScalar(INPUT, params);
 	m_simpleKeyAllowed = false;
-	Token token(TT_SCALAR, mark);
+	Token token(Token::SCALAR, mark);
 	token.value = scalar;
 	m_tokens.push(token);
 }
@@ -409,7 +409,7 @@ namespace YAML
 	// simple keys always ok after block scalars (since we're gonna start a new line anyways)
 	m_simpleKeyAllowed = true;
-	Token token(TT_SCALAR, mark);
+	Token token(Token::SCALAR, mark);
 	token.value = scalar;
 	m_tokens.push(token);
 }


@@ -66,8 +66,8 @@ namespace YAML
 	// split based on start token
 	switch(pScanner->peek().type) {
-		case TT_BLOCK_SEQ_START: ParseBlock(pScanner, state); break;
-		case TT_FLOW_SEQ_START: ParseFlow(pScanner, state); break;
+		case Token::BLOCK_SEQ_START: ParseBlock(pScanner, state); break;
+		case Token::FLOW_SEQ_START: ParseFlow(pScanner, state); break;
 		default: break;
 	}
 }
@@ -82,11 +82,11 @@ namespace YAML
 		throw ParserException(Mark::null(), ErrorMsg::END_OF_SEQ);
 	Token token = pScanner->peek();
-	if(token.type != TT_BLOCK_ENTRY && token.type != TT_BLOCK_SEQ_END)
+	if(token.type != Token::BLOCK_ENTRY && token.type != Token::BLOCK_SEQ_END)
 		throw ParserException(token.mark, ErrorMsg::END_OF_SEQ);
 	pScanner->pop();
-	if(token.type == TT_BLOCK_SEQ_END)
+	if(token.type == Token::BLOCK_SEQ_END)
 		break;
 	Node *pNode = new Node;
@@ -95,7 +95,7 @@ namespace YAML
 	// check for null
 	if(!pScanner->empty()) {
 		const Token& token = pScanner->peek();
-		if(token.type == TT_BLOCK_ENTRY || token.type == TT_BLOCK_SEQ_END)
+		if(token.type == Token::BLOCK_ENTRY || token.type == Token::BLOCK_SEQ_END)
 			continue;
 	}
@@ -113,7 +113,7 @@ namespace YAML
 		throw ParserException(Mark::null(), ErrorMsg::END_OF_SEQ_FLOW);
 	// first check for end
-	if(pScanner->peek().type == TT_FLOW_SEQ_END) {
+	if(pScanner->peek().type == Token::FLOW_SEQ_END) {
 		pScanner->pop();
 		break;
 	}
@@ -125,9 +125,9 @@ namespace YAML
 	// now eat the separator (or could be a sequence end, which we ignore - but if it's neither, then it's a bad node)
 	Token& token = pScanner->peek();
-	if(token.type == TT_FLOW_ENTRY)
+	if(token.type == Token::FLOW_ENTRY)
 		pScanner->pop();
-	else if(token.type != TT_FLOW_SEQ_END)
+	else if(token.type != Token::FLOW_SEQ_END)
 		throw ParserException(token.mark, ErrorMsg::END_OF_SEQ_FLOW);
 }
 }


@@ -14,17 +14,17 @@ namespace YAML
 void Scanner::SimpleKey::Validate()
 {
 	if(pMapStart)
-		pMapStart->status = TS_VALID;
+		pMapStart->status = Token::VALID;
 	if(pKey)
-		pKey->status = TS_VALID;
+		pKey->status = Token::VALID;
 }
 void Scanner::SimpleKey::Invalidate()
 {
 	if(pMapStart)
-		pMapStart->status = TS_INVALID;
+		pMapStart->status = Token::INVALID;
 	if(pKey)
-		pKey->status = TS_INVALID;
+		pKey->status = Token::INVALID;
 }
 // InsertSimpleKey
@@ -37,12 +37,12 @@ namespace YAML
 	// first add a map start, if necessary
 	key.pMapStart = PushIndentTo(INPUT.column(), IndentMarker::MAP);
 	if(key.pMapStart)
-		key.pMapStart->status = TS_UNVERIFIED;
+		key.pMapStart->status = Token::UNVERIFIED;
 	// then add the (now unverified) key
-	m_tokens.push(Token(TT_KEY, INPUT.mark()));
+	m_tokens.push(Token(Token::KEY, INPUT.mark()));
 	key.pKey = &m_tokens.back();
-	key.pKey->status = TS_UNVERIFIED;
+	key.pKey->status = Token::UNVERIFIED;
 	m_simpleKeys.push(key);
 }


@@ -11,29 +11,6 @@
 namespace YAML
 {
-	enum TOKEN_STATUS { TS_VALID, TS_INVALID, TS_UNVERIFIED };
-	enum TOKEN_TYPE {
-		TT_DIRECTIVE,
-		TT_DOC_START,
-		TT_DOC_END,
-		TT_BLOCK_SEQ_START,
-		TT_BLOCK_MAP_START,
-		TT_BLOCK_SEQ_END,
-		TT_BLOCK_MAP_END,
-		TT_BLOCK_ENTRY,
-		TT_FLOW_SEQ_START,
-		TT_FLOW_MAP_START,
-		TT_FLOW_SEQ_END,
-		TT_FLOW_MAP_END,
-		TT_FLOW_ENTRY,
-		TT_KEY,
-		TT_VALUE,
-		TT_ANCHOR,
-		TT_ALIAS,
-		TT_TAG,
-		TT_SCALAR
-	};
 	const std::string TokenNames[] = {
 		"DIRECTIVE",
 		"DOC_START",
@@ -57,7 +34,32 @@ namespace YAML
 	};
 	struct Token {
-		Token(TOKEN_TYPE type_, const Mark& mark_): status(TS_VALID), type(type_), mark(mark_) {}
+		// enums
+		enum STATUS { VALID, INVALID, UNVERIFIED };
+		enum TYPE {
+			DIRECTIVE,
+			DOC_START,
+			DOC_END,
+			BLOCK_SEQ_START,
+			BLOCK_MAP_START,
+			BLOCK_SEQ_END,
+			BLOCK_MAP_END,
+			BLOCK_ENTRY,
+			FLOW_SEQ_START,
+			FLOW_MAP_START,
+			FLOW_SEQ_END,
+			FLOW_MAP_END,
+			FLOW_ENTRY,
+			KEY,
+			VALUE,
+			ANCHOR,
+			ALIAS,
+			TAG,
+			SCALAR
+		};
+		// data
+		Token(TYPE type_, const Mark& mark_): status(VALID), type(type_), mark(mark_) {}
 		friend std::ostream& operator << (std::ostream& out, const Token& token) {
 			out << TokenNames[token.type] << std::string(": ") << token.value;
@@ -66,8 +68,8 @@ namespace YAML
 			return out;
 		}
-		TOKEN_STATUS status;
-		TOKEN_TYPE type;
+		STATUS status;
+		TYPE type;
 		Mark mark;
 		std::string value;
 		std::vector <std::string> params;