Mirror of https://github.com/jbeder/yaml-cpp.git (synced 2025-09-09 12:41:17 +00:00)
Replaced the queue of Token pointers with values.
We were getting memory leaks (as reported by the CRT leak detectors, which I also added), and there's really no reason (as long as we're careful) to use pointers there.
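
For orientation, here is a minimal sketch of the Scanner interface change this commit makes, simplified from the diff below (the real Token also carries line, column, value, params, and a status field, and the real PeekToken scans more input on demand):

#include <queue>

struct Token { int type; };	// heavily simplified stand-in

// Before: a queue of raw pointers; every token had to be deleted by hand
// (EatNextToken), or it leaked.
class OldScanner {
	std::queue<Token*> m_tokens;
public:
	Token *PeekNextToken() { return m_tokens.empty() ? 0 : m_tokens.front(); }
	Token *GetNextToken() {	// caller owns (and must delete) the result
		Token *pToken = PeekNextToken();
		if(!m_tokens.empty())
			m_tokens.pop();
		return pToken;
	}
	void PopNextToken() { GetNextToken(); }	// discards the pointer - a plausible source of the leaks
	void EatNextToken() { delete GetNextToken(); }
};

// After: the queue owns the tokens by value, so nothing can leak.
class NewScanner {
	std::queue<Token> m_tokens;
public:
	bool IsEmpty() { return m_tokens.empty(); }
	Token& PeekToken() { return m_tokens.front(); }	// callers check IsEmpty() first
	void PopToken() { if(!m_tokens.empty()) m_tokens.pop(); }
};
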
@@ -1,3 +1,4 @@
+#include "crt.h"
 #include "content.h"
 
 namespace YAML
@@ -1,3 +1,4 @@
+#include "crt.h"
 #include "exp.h"
 #include "exceptions.h"
 #include <sstream>
@@ -1,3 +1,4 @@
+#include "crt.h"
 #include "node.h"
 #include "exceptions.h"
 
53 src/map.cpp
@@ -1,3 +1,4 @@
+#include "crt.h"
 #include "map.h"
 #include "node.h"
 #include "scanner.h"
@@ -41,9 +42,9 @@ namespace YAML
 		Clear();
 
 		// split based on start token
-		Token *pToken = pScanner->PeekNextToken();
+		Token& token = pScanner->PeekToken();
 
-		switch(pToken->type) {
+		switch(token.type) {
 			case TT_BLOCK_MAP_START: ParseBlock(pScanner, state); break;
 			case TT_FLOW_MAP_START: ParseFlow(pScanner, state); break;
 		}
@@ -52,18 +53,18 @@ namespace YAML
 	void Map::ParseBlock(Scanner *pScanner, const ParserState& state)
 	{
 		// eat start token
-		pScanner->EatNextToken();
+		pScanner->PopToken();
 
 		while(1) {
-			Token *pToken = pScanner->PeekNextToken();
-			if(!pToken)
+			if(pScanner->IsEmpty())
 				throw ParserException(-1, -1, ErrorMsg::END_OF_MAP);
 
-			if(pToken->type != TT_KEY && pToken->type != TT_BLOCK_END)
-				throw ParserException(pToken->line, pToken->column, ErrorMsg::END_OF_MAP);
+			Token token = pScanner->PeekToken();
+			if(token.type != TT_KEY && token.type != TT_BLOCK_END)
+				throw ParserException(token.line, token.column, ErrorMsg::END_OF_MAP);
 
-			pScanner->PopNextToken();
-			if(pToken->type == TT_BLOCK_END)
+			pScanner->PopToken();
+			if(token.type == TT_BLOCK_END)
 				break;
 
 			Node *pKey = new Node;
@@ -74,8 +75,8 @@ namespace YAML
 			pKey->Parse(pScanner, state);
 
 			// now grab value (optional)
-			if(pScanner->PeekNextToken() && pScanner->PeekNextToken()->type == TT_VALUE) {
-				pScanner->PopNextToken();
+			if(!pScanner->IsEmpty() && pScanner->PeekToken().type == TT_VALUE) {
+				pScanner->PopToken();
 				pValue->Parse(pScanner, state);
 			}
 
@@ -91,24 +92,24 @@ namespace YAML
 	void Map::ParseFlow(Scanner *pScanner, const ParserState& state)
 	{
 		// eat start token
-		pScanner->EatNextToken();
+		pScanner->PopToken();
 
 		while(1) {
-			Token *pToken = pScanner->PeekNextToken();
-			if(!pToken)
+			if(pScanner->IsEmpty())
 				throw ParserException(-1, -1, ErrorMsg::END_OF_MAP_FLOW);
 
+			Token& token = pScanner->PeekToken();
 			// first check for end
-			if(pToken->type == TT_FLOW_MAP_END) {
-				pScanner->EatNextToken();
+			if(token.type == TT_FLOW_MAP_END) {
+				pScanner->PopToken();
 				break;
 			}
 
 			// now it better be a key
-			if(pToken->type != TT_KEY)
-				throw ParserException(pToken->line, pToken->column, ErrorMsg::END_OF_MAP_FLOW);
+			if(token.type != TT_KEY)
+				throw ParserException(token.line, token.column, ErrorMsg::END_OF_MAP_FLOW);
 
-			pScanner->PopNextToken();
+			pScanner->PopToken();
 
 			Node *pKey = new Node;
 			Node *pValue = new Node;
@@ -118,17 +119,17 @@ namespace YAML
 			pKey->Parse(pScanner, state);
 
 			// now grab value (optional)
-			if(pScanner->PeekNextToken() && pScanner->PeekNextToken()->type == TT_VALUE) {
-				pScanner->PopNextToken();
+			if(!pScanner->IsEmpty() && pScanner->PeekToken().type == TT_VALUE) {
+				pScanner->PopToken();
 				pValue->Parse(pScanner, state);
 			}
 
 			// now eat the separator (or could be a map end, which we ignore - but if it's neither, then it's a bad node)
-			pToken = pScanner->PeekNextToken();
-			if(pToken->type == TT_FLOW_ENTRY)
-				pScanner->EatNextToken();
-			else if(pToken->type != TT_FLOW_MAP_END)
-				throw ParserException(pToken->line, pToken->column, ErrorMsg::END_OF_MAP_FLOW);
+			Token& nextToken = pScanner->PeekToken();
+			if(nextToken.type == TT_FLOW_ENTRY)
+				pScanner->PopToken();
+			else if(nextToken.type != TT_FLOW_MAP_END)
+				throw ParserException(nextToken.line, nextToken.column, ErrorMsg::END_OF_MAP_FLOW);
 
 			m_data[pKey] = pValue;
 		} catch(Exception& e) {

46 src/node.cpp
@@ -1,3 +1,4 @@
+#include "crt.h"
 #include "node.h"
 #include "token.h"
 #include "scanner.h"
@@ -42,11 +43,7 @@ namespace YAML
 			return;
 
 		// now split based on what kind of node we should be
-		Token *pToken = pScanner->PeekNextToken();
-		if(pToken->type == TT_DOC_END)
-			return;
-
-		switch(pToken->type) {
+		switch(pScanner->PeekToken().type) {
 			case TT_SCALAR:
 				m_pContent = new Scalar;
 				m_pContent->Parse(pScanner, state);
@@ -61,6 +58,7 @@ namespace YAML
 			case TT_BLOCK_MAP_START:
 				m_pContent = new Map;
 				m_pContent->Parse(pScanner, state);
 				break;
 		}
 	}
@@ -69,53 +67,53 @@ namespace YAML
 	void Node::ParseHeader(Scanner *pScanner, const ParserState& state)
 	{
 		while(1) {
-			Token *pToken = pScanner->PeekNextToken();
-			if(!pToken || (pToken->type != TT_TAG && pToken->type != TT_ANCHOR && pToken->type != TT_ALIAS))
-				break;
+			if(pScanner->IsEmpty())
+				return;
 
-			switch(pToken->type) {
+			switch(pScanner->PeekToken().type) {
 				case TT_TAG: ParseTag(pScanner, state); break;
 				case TT_ANCHOR: ParseAnchor(pScanner, state); break;
 				case TT_ALIAS: ParseAlias(pScanner, state); break;
+				default: return;
 			}
 		}
 	}
 
 	void Node::ParseTag(Scanner *pScanner, const ParserState& state)
 	{
-		Token *pToken = pScanner->PeekNextToken();
+		Token& token = pScanner->PeekToken();
 		if(m_tag != "")
-			throw ParserException(pToken->line, pToken->column, ErrorMsg::MULTIPLE_TAGS);
+			throw ParserException(token.line, token.column, ErrorMsg::MULTIPLE_TAGS);
 
-		m_tag = state.TranslateTag(pToken->value);
+		m_tag = state.TranslateTag(token.value);
 
-		for(unsigned i=0;i<pToken->params.size();i++)
-			m_tag += pToken->params[i];
-		pScanner->PopNextToken();
+		for(unsigned i=0;i<token.params.size();i++)
+			m_tag += token.params[i];
+		pScanner->PopToken();
 	}
 
 	void Node::ParseAnchor(Scanner *pScanner, const ParserState& state)
 	{
-		Token *pToken = pScanner->PeekNextToken();
+		Token& token = pScanner->PeekToken();
 		if(m_anchor != "")
-			throw ParserException(pToken->line, pToken->column, ErrorMsg::MULTIPLE_ANCHORS);
+			throw ParserException(token.line, token.column, ErrorMsg::MULTIPLE_ANCHORS);
 
-		m_anchor = pToken->value;
+		m_anchor = token.value;
 		m_alias = false;
-		pScanner->PopNextToken();
+		pScanner->PopToken();
 	}
 
 	void Node::ParseAlias(Scanner *pScanner, const ParserState& state)
 	{
-		Token *pToken = pScanner->PeekNextToken();
+		Token& token = pScanner->PeekToken();
 		if(m_anchor != "")
-			throw ParserException(pToken->line, pToken->column, ErrorMsg::MULTIPLE_ALIASES);
+			throw ParserException(token.line, token.column, ErrorMsg::MULTIPLE_ALIASES);
 		if(m_tag != "")
-			throw ParserException(pToken->line, pToken->column, ErrorMsg::ALIAS_CONTENT);
+			throw ParserException(token.line, token.column, ErrorMsg::ALIAS_CONTENT);
 
-		m_anchor = pToken->value;
+		m_anchor = token.value;
 		m_alias = true;
-		pScanner->PopNextToken();
+		pScanner->PopToken();
 	}
 
 	void Node::Write(std::ostream& out, int indent, bool startedLine, bool onlyOneCharOnLine) const

@@ -1,3 +1,4 @@
+#include "crt.h"
 #include "parser.h"
 #include "scanner.h"
 #include "token.h"
@@ -18,7 +19,7 @@ namespace YAML
 
 	Parser::operator bool() const
 	{
-		return m_pScanner->PeekNextToken() != 0;
+		return !m_pScanner->IsEmpty();
 	}
 
 	void Parser::Load(std::istream& in)
@@ -40,19 +41,19 @@ namespace YAML
 		ParseDirectives();
 
 		// we better have some tokens in the queue
-		if(!m_pScanner->PeekNextToken())
+		if(m_pScanner->IsEmpty())
 			return;
 
 		// first eat doc start (optional)
-		if(m_pScanner->PeekNextToken()->type == TT_DOC_START)
-			m_pScanner->EatNextToken();
+		if(m_pScanner->PeekToken().type == TT_DOC_START)
+			m_pScanner->PopToken();
 
 		// now parse our root node
 		document.Parse(m_pScanner, m_state);
 
 		// and finally eat any doc ends we see
-		while(m_pScanner->PeekNextToken() && m_pScanner->PeekNextToken()->type == TT_DOC_END)
-			m_pScanner->EatNextToken();
+		while(!m_pScanner->IsEmpty() && m_pScanner->PeekToken().type == TT_DOC_END)
+			m_pScanner->PopToken();
 	}
 
 	// ParseDirectives
@@ -62,8 +63,11 @@ namespace YAML
 		bool readDirective = false;
 
 		while(1) {
-			Token *pToken = m_pScanner->PeekNextToken();
-			if(!pToken || pToken->type != TT_DIRECTIVE)
+			if(m_pScanner->IsEmpty())
 				break;
 
+			Token& token = m_pScanner->PeekToken();
+			if(token.type != TT_DIRECTIVE)
+				break;
+
 			// we keep the directives from the last document if none are specified;
@@ -72,8 +76,8 @@ namespace YAML
 				m_state.Reset();
 
 			readDirective = true;
-			HandleDirective(pToken);
-			m_pScanner->PopNextToken();
+			HandleDirective(&token);
+			m_pScanner->PopToken();
 		}
 	}
 
@@ -119,11 +123,11 @@ namespace YAML
 	void Parser::PrintTokens(std::ostream& out)
 	{
 		while(1) {
-			Token *pToken = m_pScanner->GetNextToken();
-			if(!pToken)
+			if(m_pScanner->IsEmpty())
 				break;
 
-			out << *pToken << std::endl;
+			out << m_pScanner->PeekToken() << std::endl;
+			m_pScanner->PopToken();
 		}
 	}
 }

@@ -1,3 +1,4 @@
+#include "crt.h"
 #include "parserstate.h"
 
 namespace YAML
@@ -1,3 +1,4 @@
+#include "crt.h"
 #include "regex.h"
 
 namespace YAML
@@ -1,3 +1,4 @@
+#include "crt.h"
 #include "scalar.h"
 #include "scanner.h"
 #include "token.h"
@@ -16,9 +17,9 @@ namespace YAML
 
 	void Scalar::Parse(Scanner *pScanner, const ParserState& state)
 	{
-		Token *pToken = pScanner->GetNextToken();
-		m_data = pToken->value;
-		delete pToken;
+		Token& token = pScanner->PeekToken();
+		m_data = token.value;
+		pScanner->PopToken();
 	}
 
 	void Scalar::Write(std::ostream& out, int indent, bool startedLine, bool onlyOneCharOnLine)

@@ -1,3 +1,4 @@
+#include "crt.h"
 #include "scanner.h"
 #include "token.h"
 #include "exceptions.h"
@@ -12,62 +13,46 @@ namespace YAML
 	Scanner::~Scanner()
 	{
-		while(!m_tokens.empty()) {
-			delete m_tokens.front();
-			m_tokens.pop();
-		}
 	}
 
-	// GetNextToken
-	// . Removes and returns the next token on the queue.
-	Token *Scanner::GetNextToken()
+	// IsEmpty
+	// . Returns true if there are no more tokens to be read
+	bool Scanner::IsEmpty()
 	{
-		Token *pToken = PeekNextToken();
+		PeekToken();	// to ensure that there are tokens in the queue, if possible
+		return m_tokens.empty();
+	}
+
+	// PopToken
+	// . Simply removes the next token on the queue.
+	void Scanner::PopToken()
+	{
+		PeekToken();	// to ensure that there are tokens in the queue
 		if(!m_tokens.empty())
 			m_tokens.pop();
-		return pToken;
 	}
 
-	// PopNextToken
-	// . Simply removes the next token on the queue.
-	void Scanner::PopNextToken()
-	{
-		GetNextToken();
-	}
-
-	// EatNextToken
-	// . Removes and deletes the next token on the queue
-	void Scanner::EatNextToken()
-	{
-		delete GetNextToken();
-	}
-
-	// PeekNextToken
+	// PeekToken
 	// . Returns (but does not remove) the next token on the queue, and scans if only we need to.
-	Token *Scanner::PeekNextToken()
+	Token& Scanner::PeekToken()
 	{
 		while(1) {
-			Token *pToken = 0;
+			if(!m_tokens.empty()) {
+				Token& token = m_tokens.front();
 
-			// is there a token in the queue?
-			if(!m_tokens.empty())
-				pToken = m_tokens.front();
+				// return this guy if it's valid
+				if(token.status == TS_VALID)
+					return token;
 
-			// (here's where we clean up the impossible tokens)
-			if(pToken && pToken->status == TS_INVALID) {
-				m_tokens.pop();
-				delete pToken;
-				continue;
-			}
+				// here's where we clean up the impossible tokens
+				if(token.status == TS_INVALID) {
+					m_tokens.pop();
+					continue;
+				}
+
+				// note: what's left are the unverified tokens
+			}
 
 			// on unverified tokens, we just have to wait
-			if(pToken && pToken->status == TS_UNVERIFIED)
-				pToken = 0;
-
-			// then that's what we want
-			if(pToken)
-				return pToken;
-
 			// no token? maybe we've actually finished
 			if(m_endedStream)
 				break;
@@ -76,7 +61,7 @@ namespace YAML
 			ScanNextToken();
 		}
 
-		return 0;
+		// TODO: find something to return here, or assert (but can't do that! maybe split into two functions?)
 	}
 
 	// ScanNextToken
@@ -254,14 +239,12 @@ namespace YAML
 
 		// now push
 		m_indents.push(column);
-		Token *pToken = 0;
 		if(sequence)
-			pToken = new Token(TT_BLOCK_SEQ_START, INPUT.line, INPUT.column);
+			m_tokens.push(Token(TT_BLOCK_SEQ_START, INPUT.line, INPUT.column));
 		else
-			pToken = new Token(TT_BLOCK_MAP_START, INPUT.line, INPUT.column);
+			m_tokens.push(Token(TT_BLOCK_MAP_START, INPUT.line, INPUT.column));
 
-		m_tokens.push(pToken);
-		return pToken;
+		return &m_tokens.back();
 	}
 
 	// PopIndentTo
@@ -276,7 +259,7 @@ namespace YAML
 		// now pop away
 		while(!m_indents.empty() && m_indents.top() > column) {
 			m_indents.pop();
-			m_tokens.push(new Token(TT_BLOCK_END, INPUT.line, INPUT.column));
+			m_tokens.push(Token(TT_BLOCK_END, INPUT.line, INPUT.column));
 		}
 	}
 }

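A note on the TODO that replaces return 0; above (my observation, not part of the commit): now that PeekToken returns a reference, there is no natural "no token" value to hand back once the stream is exhausted, so callers are expected to guard with IsEmpty() first. A toy illustration of that contract, using a plain std::queue rather than the real Scanner:

#include <queue>
#include <stdexcept>

struct Token { int type; };

// Returning a reference means an empty queue has to be an error (or be
// ruled out by the caller); there is no equivalent of returning 0.
Token& PeekFront(std::queue<Token>& tokens)
{
	if(tokens.empty())
		throw std::runtime_error("no tokens to peek at");
	return tokens.front();
}
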
@@ -6,21 +6,19 @@
 #include <stack>
 #include <set>
 #include "stream.h"
+#include "token.h"
 
 namespace YAML
 {
-	struct Token;
-
 	class Scanner
 	{
 	public:
 		Scanner(std::istream& in);
 		~Scanner();
 
-		Token *GetNextToken();
-		void EatNextToken();
-		void PopNextToken();
-		Token *PeekNextToken();
+		bool IsEmpty();
+		void PopToken();
+		Token& PeekToken();
 
 	private:
 		// scanning
@@ -72,7 +70,7 @@ namespace YAML
 		Stream INPUT;
 
 		// the output (tokens)
-		std::queue <Token *> m_tokens;
+		std::queue <Token> m_tokens;
 
 		// state info
 		bool m_startedStream, m_endedStream;

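Given the header above, the call-site idiom used throughout this commit (check IsEmpty, inspect PeekToken, then PopToken) can be sketched as follows; the helper name ConsumeDocEnds is illustrative only, but its body mirrors the document-parsing hunk earlier in the diff that skips TT_DOC_END tokens:

#include "scanner.h"
#include "token.h"

namespace YAML
{
	// Illustrative helper, not part of the library: skip any TT_DOC_END
	// tokens using only the new interface (IsEmpty / PeekToken / PopToken).
	void ConsumeDocEnds(Scanner *pScanner)
	{
		while(!pScanner->IsEmpty() && pScanner->PeekToken().type == TT_DOC_END)
			pScanner->PopToken();
	}
}
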
@@ -1,3 +1,4 @@
+#include "crt.h"
 #include "scanscalar.h"
 #include "scanner.h"
 #include "exp.h"
@@ -1,3 +1,4 @@
+#include "crt.h"
 #include "scanner.h"
 #include "token.h"
 #include "exceptions.h"
@@ -49,10 +50,10 @@ namespace YAML
 			params.push_back(param);
 		}
 
-		Token *pToken = new Token(TT_DIRECTIVE, line, column);
-		pToken->value = name;
-		pToken->params = params;
-		m_tokens.push(pToken);
+		Token token(TT_DIRECTIVE, line, column);
+		token.value = name;
+		token.params = params;
+		m_tokens.push(token);
 	}
 
 	// DocStart
@@ -65,7 +66,7 @@ namespace YAML
 		// eat
 		int line = INPUT.line, column = INPUT.column;
 		INPUT.eat(3);
-		m_tokens.push(new Token(TT_DOC_START, line, column));
+		m_tokens.push(Token(TT_DOC_START, line, column));
 	}
 
 	// DocEnd
@@ -78,7 +79,7 @@ namespace YAML
 		// eat
 		int line = INPUT.line, column = INPUT.column;
 		INPUT.eat(3);
-		m_tokens.push(new Token(TT_DOC_END, line, column));
+		m_tokens.push(Token(TT_DOC_END, line, column));
 	}
 
 	// FlowStart
@@ -93,7 +94,7 @@ namespace YAML
 		int line = INPUT.line, column = INPUT.column;
 		char ch = INPUT.get();
 		TOKEN_TYPE type = (ch == Keys::FlowSeqStart ? TT_FLOW_SEQ_START : TT_FLOW_MAP_START);
-		m_tokens.push(new Token(type, line, column));
+		m_tokens.push(Token(type, line, column));
 	}
 
 	// FlowEnd
@@ -109,7 +110,7 @@ namespace YAML
 		int line = INPUT.line, column = INPUT.column;
 		char ch = INPUT.get();
 		TOKEN_TYPE type = (ch == Keys::FlowSeqEnd ? TT_FLOW_SEQ_END : TT_FLOW_MAP_END);
-		m_tokens.push(new Token(type, line, column));
+		m_tokens.push(Token(type, line, column));
 	}
 
 	// FlowEntry
@@ -120,7 +121,7 @@ namespace YAML
 		// eat
 		int line = INPUT.line, column = INPUT.column;
 		INPUT.eat(1);
-		m_tokens.push(new Token(TT_FLOW_ENTRY, line, column));
+		m_tokens.push(Token(TT_FLOW_ENTRY, line, column));
 	}
 
 	// BlockEntry
@@ -140,7 +141,7 @@ namespace YAML
 		// eat
 		int line = INPUT.line, column = INPUT.column;
 		INPUT.eat(1);
-		m_tokens.push(new Token(TT_BLOCK_ENTRY, line, column));
+		m_tokens.push(Token(TT_BLOCK_ENTRY, line, column));
 	}
 
 	// Key
@@ -163,7 +164,7 @@ namespace YAML
 		// eat
 		int line = INPUT.line, column = INPUT.column;
 		INPUT.eat(1);
-		m_tokens.push(new Token(TT_KEY, line, column));
+		m_tokens.push(Token(TT_KEY, line, column));
 	}
 
 	// Value
@@ -192,7 +193,7 @@ namespace YAML
 		// eat
 		int line = INPUT.line, column = INPUT.column;
 		INPUT.eat(1);
-		m_tokens.push(new Token(TT_VALUE, line, column));
+		m_tokens.push(Token(TT_VALUE, line, column));
 	}
 
 	// AnchorOrAlias
@@ -224,9 +225,9 @@ namespace YAML
 			throw ParserException(INPUT.line, INPUT.column, alias ? ErrorMsg::CHAR_IN_ALIAS : ErrorMsg::CHAR_IN_ANCHOR);
 
 		// and we're done
-		Token *pToken = new Token(alias ? TT_ALIAS : TT_ANCHOR, line, column);
-		pToken->value = name;
-		m_tokens.push(pToken);
+		Token token(alias ? TT_ALIAS : TT_ANCHOR, line, column);
+		token.value = name;
+		m_tokens.push(token);
 	}
 
 	// Tag
@@ -261,10 +262,10 @@ namespace YAML
 			handle = "!";
 		}
 
-		Token *pToken = new Token(TT_TAG, line, column);
-		pToken->value = handle;
-		pToken->params.push_back(suffix);
-		m_tokens.push(pToken);
+		Token token(TT_TAG, line, column);
+		token.value = handle;
+		token.params.push_back(suffix);
+		m_tokens.push(token);
 	}
 
 	// PlainScalar
@@ -298,9 +299,9 @@ namespace YAML
 		//if(Exp::IllegalCharInScalar.Matches(INPUT))
 		//	throw ParserException(INPUT.line, INPUT.column, ErrorMsg::CHAR_IN_SCALAR);
 
-		Token *pToken = new Token(TT_SCALAR, line, column);
-		pToken->value = scalar;
-		m_tokens.push(pToken);
+		Token token(TT_SCALAR, line, column);
+		token.value = scalar;
+		m_tokens.push(token);
 	}
 
 	// QuotedScalar
@@ -332,9 +333,9 @@ namespace YAML
 		scalar = ScanScalar(INPUT, params);
 		m_simpleKeyAllowed = false;
 
-		Token *pToken = new Token(TT_SCALAR, line, column);
-		pToken->value = scalar;
-		m_tokens.push(pToken);
+		Token token(TT_SCALAR, line, column);
+		token.value = scalar;
+		m_tokens.push(token);
 	}
 
 	// BlockScalarToken
@@ -397,8 +398,8 @@ namespace YAML
 		// simple keys always ok after block scalars (since we're gonna start a new line anyways)
 		m_simpleKeyAllowed = true;
 
-		Token *pToken = new Token(TT_SCALAR, line, column);
-		pToken->value = scalar;
-		m_tokens.push(pToken);
+		Token token(TT_SCALAR, line, column);
+		token.value = scalar;
+		m_tokens.push(token);
 	}
 }

@@ -1,3 +1,4 @@
+#include "crt.h"
 #include "sequence.h"
 #include "node.h"
 #include "scanner.h"
@@ -51,9 +52,9 @@ namespace YAML
 		Clear();
 
 		// split based on start token
-		Token *pToken = pScanner->PeekNextToken();
+		Token& token = pScanner->PeekToken();
 
-		switch(pToken->type) {
+		switch(token.type) {
 			case TT_BLOCK_SEQ_START: ParseBlock(pScanner, state); break;
 			case TT_BLOCK_ENTRY: ParseImplicit(pScanner, state); break;
 			case TT_FLOW_SEQ_START: ParseFlow(pScanner, state); break;
@@ -63,18 +64,18 @@ namespace YAML
 	void Sequence::ParseBlock(Scanner *pScanner, const ParserState& state)
 	{
 		// eat start token
-		pScanner->EatNextToken();
+		pScanner->PopToken();
 
 		while(1) {
-			Token *pToken = pScanner->PeekNextToken();
-			if(!pToken)
+			if(pScanner->IsEmpty())
 				throw ParserException(-1, -1, ErrorMsg::END_OF_SEQ);
 
-			if(pToken->type != TT_BLOCK_ENTRY && pToken->type != TT_BLOCK_END)
-				throw ParserException(pToken->line, pToken->column, ErrorMsg::END_OF_SEQ);
+			Token token = pScanner->PeekToken();
+			if(token.type != TT_BLOCK_ENTRY && token.type != TT_BLOCK_END)
+				throw ParserException(token.line, token.column, ErrorMsg::END_OF_SEQ);
 
-			pScanner->PopNextToken();
-			if(pToken->type == TT_BLOCK_END)
+			pScanner->PopToken();
+			if(token.type == TT_BLOCK_END)
 				break;
 
 			Node *pNode = new Node;
@@ -86,16 +87,16 @@ namespace YAML
 	void Sequence::ParseImplicit(Scanner *pScanner, const ParserState& state)
 	{
 		while(1) {
-			Token *pToken = pScanner->PeekNextToken();
 			// we're actually *allowed* to have no tokens at some point
-			if(!pToken)
+			if(pScanner->IsEmpty())
 				break;
 
 			// and we end at anything other than a block entry
-			if(pToken->type != TT_BLOCK_ENTRY)
+			Token& token = pScanner->PeekToken();
+			if(token.type != TT_BLOCK_ENTRY)
 				break;
 
-			pScanner->PopNextToken();
+			pScanner->PopToken();
 
 			Node *pNode = new Node;
 			m_data.push_back(pNode);
@@ -106,16 +107,15 @@ namespace YAML
 	void Sequence::ParseFlow(Scanner *pScanner, const ParserState& state)
 	{
 		// eat start token
-		pScanner->EatNextToken();
+		pScanner->PopToken();
 
 		while(1) {
-			Token *pToken = pScanner->PeekNextToken();
-			if(!pToken)
+			if(pScanner->IsEmpty())
 				throw ParserException(-1, -1, ErrorMsg::END_OF_SEQ_FLOW);
 
 			// first check for end
-			if(pToken->type == TT_FLOW_SEQ_END) {
-				pScanner->PopNextToken();
+			if(pScanner->PeekToken().type == TT_FLOW_SEQ_END) {
+				pScanner->PopToken();
 				break;
 			}
 
@@ -125,11 +125,11 @@ namespace YAML
 			pNode->Parse(pScanner, state);
 
 			// now eat the separator (or could be a sequence end, which we ignore - but if it's neither, then it's a bad node)
-			pToken = pScanner->PeekNextToken();
-			if(pToken->type == TT_FLOW_ENTRY)
-				pScanner->EatNextToken();
-			else if(pToken->type != TT_FLOW_SEQ_END)
-				throw ParserException(pToken->line, pToken->column, ErrorMsg::END_OF_SEQ_FLOW);
+			Token& token = pScanner->PeekToken();
+			if(token.type == TT_FLOW_ENTRY)
+				pScanner->PopToken();
+			else if(token.type != TT_FLOW_SEQ_END)
+				throw ParserException(token.line, token.column, ErrorMsg::END_OF_SEQ_FLOW);
 		}
 	}
 

@@ -1,3 +1,4 @@
+#include "crt.h"
 #include "scanner.h"
 #include "token.h"
 #include "exceptions.h"
@@ -39,9 +40,9 @@ namespace YAML
 			key.pMapStart->status = TS_UNVERIFIED;
 
 		// then add the (now unverified) key
-		key.pKey = new Token(TT_KEY, INPUT.line, INPUT.column);
+		m_tokens.push(Token(TT_KEY, INPUT.line, INPUT.column));
+		key.pKey = &m_tokens.back();
 		key.pKey->status = TS_UNVERIFIED;
-		m_tokens.push(key.pKey);
 
 		m_simpleKeys.push(key);
 	}

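One subtlety worth noting in the hunk above (an observation about the new code, not something stated in the commit): key.pKey now points at an element inside the std::queue<Token>, which is backed by std::deque by default. Pushing more tokens onto the back does not invalidate references to existing elements, so the pointer stays valid until that token is actually popped. A toy demonstration of that guarantee:

#include <cassert>
#include <deque>

int main()
{
	std::deque<int> d;
	d.push_back(1);
	int& front = d.front();	// reference to an existing element
	for(int i = 2; i <= 1000; i++)
		d.push_back(i);	// insertions at the ends keep 'front' valid
	assert(front == 1);
	return 0;
}
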
@@ -1,3 +1,4 @@
+#include "crt.h"
 #include "stream.h"
 
 namespace YAML