本文整理汇总了C++中Lexer类的典型用法代码示例。如果您正苦于以下问题:C++ Lexer类的具体用法?C++ Lexer怎么用?C++ Lexer使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Lexer类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: skipComments
// Advance Tok past a run of comment tokens. Stops either at the first
// non-comment token or when the raw lexer signals end of buffer
// (LexFromRawLexer returning true).
void skipComments(Lexer &Lex, Token &Tok) {
  while (true) {
    if (!Tok.is(tok::comment))
      return; // reached a real token
    if (Lex.LexFromRawLexer(Tok))
      return; // end of buffer
  }
}
示例2: pph
// Read one Format section of an external template definition from the
// lexer, up to the terminating "FormatEnd" tag, filling in this
// Format's product/update/requirement/option/preamble/transform/file
// fields.
void Template::Format::readFormat(Lexer & lex)
{
// Tag ids for the keywords recognised inside a Format section.
enum {
FO_PRODUCT = 1,
FO_UPDATEFORMAT,
FO_UPDATERESULT,
FO_REQUIREMENT,
FO_OPTION,
FO_PREAMBLE,
FO_TRANSFORMCOMMAND,
FO_TRANSFORMOPTION,
FO_REFERENCEDFILE,
FO_END
};
// Keyword -> tag table (entries listed sorted by keyword name).
LexerKeyword formattags[] = {
{ "formatend", FO_END },
{ "option", FO_OPTION },
{ "preamble", FO_PREAMBLE },
{ "product", FO_PRODUCT },
{ "referencedfile", FO_REFERENCEDFILE },
{ "requirement", FO_REQUIREMENT },
{ "transformcommand", FO_TRANSFORMCOMMAND },
{ "transformoption", FO_TRANSFORMOPTION },
{ "updateformat", FO_UPDATEFORMAT },
{ "updateresult", FO_UPDATERESULT }
};
// Pushes the keyword table onto the lexer for the duration of this
// call; the helper's destructor pops it again on all exit paths.
PushPopHelper pph(lex, formattags);
while (lex.isOK()) {
switch (lex.lex()) {
case FO_PRODUCT:
// next(true) fetches the tag's string argument.
// NOTE(review): presumably `true` enables quoted-string handling —
// confirm against Lexer::next.
lex.next(true);
product = lex.getString();
break;
case FO_UPDATEFORMAT:
lex.next(true);
updateFormat = lex.getString();
break;
case FO_UPDATERESULT:
lex.next(true);
updateResult = lex.getString();
break;
case FO_REQUIREMENT:
// Requirements and preambles accumulate: one entry per tag seen.
lex.next(true);
requirements.push_back(lex.getString());
break;
case FO_PREAMBLE:
lex.next(true);
preambleNames.push_back(lex.getString());
break;
case FO_TRANSFORMCOMMAND: {
// Two arguments: a name, then the value passed to the factory.
lex.next(true);
string const name = lex.getString();
lex.next(true);
setCommandFactory(*this, name, lex.getString());
break;
}
case FO_TRANSFORMOPTION: {
lex.next(true);
string const name = lex.getString();
lex.next(true);
setOptionFactory(*this, name, lex.getString());
break;
}
case FO_OPTION: {
// "Option <name> <value>" pairs are collected in the order read.
lex.next(true);
string const name = lex.getString();
lex.next(true);
string const opt = lex.getString();
options.push_back(Option(name, opt));
break;
}
case FO_REFERENCEDFILE: {
// Referenced files are grouped by the format they belong to.
lex.next(true);
string const format = lex.getString();
lex.next(true);
string const file = lex.getString();
referencedFiles[format].push_back(file);
break;
}
case FO_END:
// "FormatEnd" terminates the section.
return;
}
}
}
示例3: pph
// Read a "LabelType <keyword>" value from the lexer and translate it
// into the corresponding LABEL_* constant in `labeltype`. Unknown
// keywords produce a lexer error and leave `labeltype` unchanged.
void Layout::readLabelType(Lexer & lex)
{
	// Internal tag values for the recognised keywords.
	enum {
		LA_NO_LABEL = 1,
		LA_MANUAL,
		LA_ABOVE,
		LA_CENTERED,
		LA_STATIC,
		LA_SENSITIVE,
		LA_ENUMERATE,
		LA_ITEMIZE,
		LA_BIBLIO
	};
	// Keyword table (kept sorted by keyword name).
	LexerKeyword labelTypeTags[] = {
		{ "above", LA_ABOVE },
		{ "bibliography", LA_BIBLIO },
		{ "centered", LA_CENTERED },
		{ "enumerate", LA_ENUMERATE },
		{ "itemize", LA_ITEMIZE },
		{ "manual", LA_MANUAL },
		{ "no_label", LA_NO_LABEL },
		{ "sensitive", LA_SENSITIVE },
		{ "static", LA_STATIC }
	};
	// Push the table for the duration of this call; popped by the
	// helper's destructor.
	PushPopHelper pph(lex, labelTypeTags);
	// One switch handles both the unknown-keyword error and the
	// mapping to the public LABEL_* constants.
	switch (lex.lex()) {
	case Lexer::LEX_UNDEF:
		lex.printError("Unknown labeltype tag `$$Token'");
		return;
	case LA_NO_LABEL:  labeltype = LABEL_NO_LABEL;  break;
	case LA_MANUAL:    labeltype = LABEL_MANUAL;    break;
	case LA_ABOVE:     labeltype = LABEL_ABOVE;    break;
	case LA_CENTERED:  labeltype = LABEL_CENTERED;  break;
	case LA_STATIC:    labeltype = LABEL_STATIC;    break;
	case LA_SENSITIVE: labeltype = LABEL_SENSITIVE; break;
	case LA_ENUMERATE: labeltype = LABEL_ENUMERATE; break;
	case LA_ITEMIZE:   labeltype = LABEL_ITEMIZE;   break;
	case LA_BIBLIO:    labeltype = LABEL_BIBLIO;    break;
	}
}
示例4: prim
// Parse and evaluate a primary expression: a function call, a number,
// a variable reference or assignment, a unary minus, or a
// parenthesised sub-expression. Throws Error::SyntaxError on any
// malformed input.
double Calculator::prim(Lexer& lexer)
{
    const Token head = lexer.peek();
    switch (head.Type)
    {
    case Token::FUNC:
        lexer.pop();
        return func(lexer);

    case Token::NUMBER:
        return lexer.pop().NumberValue;

    case Token::NAME:
    {
        const string ident = head.StringValue;
        lexer.pop();
        // "name(" -> function-style call of a user symbol.
        if (lexer.peek().Type == Token::LP)
        {
            lexer.pop();
            return call(lexer, ident);
        }
        // "name =" -> (re)assign a variable; refuse to shadow
        // non-variable symbols.
        if (lexer.peek().Type == Token::ASSIGN)
        {
            lexer.pop();
            SymEntry* entry = symbols.Get(ident);
            if (entry && entry->type != SymEntry::VAR)
                throw Error::SyntaxError("not a variable: " + ident);
            double assigned = expr(lexer);
            symbols.Set(ident, new SymVariable(assigned));
            return assigned;
        }
        // Plain "name" -> read an existing variable's value.
        SymEntry* entry = symbols.Get(ident);
        if (!entry)
            throw Error::SyntaxError("undefined symbol: " + ident);
        if (entry->type != SymEntry::VAR)
            throw Error::SyntaxError("not a variable: " + ident);
        return static_cast<SymVariable*>(entry)->value;
    }

    case Token::MINUS:
        lexer.pop();
        return -prim(lexer);

    case Token::LP:
    {
        lexer.pop();
        double inner = expr(lexer);
        if (lexer.pop().Type != Token::RP)
            throw Error::SyntaxError("')' expected");
        return inner;
    }

    default:
        throw Error::SyntaxError("primary expected");
    }
}
示例5: while
// Expand __has_include and __has_include_next if possible. If there's no
// definitive answer return false.
//
// Consumes the "( <header> )" / "( \"header\" )" token sequence from the
// raw lexer; on a true return, FileExists reports whether header search
// found the file and Tok is left on the closing paren.
// NOTE(review): the Lookup parameter is unused in this body — confirm
// whether it is needed for __has_include_next handling elsewhere.
bool InclusionRewriter::HandleHasInclude(
FileID FileId, Lexer &RawLex, const DirectoryLookup *Lookup, Token &Tok,
bool &FileExists) {
// Lex the opening paren.
RawLex.LexFromRawLexer(Tok);
if (Tok.isNot(tok::l_paren))
return false;
RawLex.LexFromRawLexer(Tok);
SmallString<128> FilenameBuffer;
StringRef Filename;
// Since the raw lexer doesn't give us angle_literals we have to parse them
// ourselves.
// FIXME: What to do if the file name is a macro?
if (Tok.is(tok::less)) {
// Accumulate every token up to (not including) the '>' and rebuild
// the angled spelling by hand.
RawLex.LexFromRawLexer(Tok);
FilenameBuffer += '<';
do {
if (Tok.is(tok::eod)) // Sanity check.
return false;
if (Tok.is(tok::raw_identifier))
PP.LookUpIdentifierInfo(Tok);
// Get the string piece.
SmallVector<char, 128> TmpBuffer;
bool Invalid = false;
StringRef TmpName = PP.getSpelling(Tok, TmpBuffer, &Invalid);
if (Invalid)
return false;
FilenameBuffer += TmpName;
RawLex.LexFromRawLexer(Tok);
} while (Tok.isNot(tok::greater));
FilenameBuffer += '>';
Filename = FilenameBuffer;
} else {
// Quoted form: a single string-literal token carries the name.
if (Tok.isNot(tok::string_literal))
return false;
bool Invalid = false;
Filename = PP.getSpelling(Tok, FilenameBuffer, &Invalid);
if (Invalid)
return false;
}
// Lex the closing paren.
RawLex.LexFromRawLexer(Tok);
if (Tok.isNot(tok::r_paren))
return false;
// Now ask HeaderInfo if it knows about the header.
// FIXME: Subframeworks aren't handled here. Do we care?
bool isAngled = PP.GetIncludeFilenameSpelling(Tok.getLocation(), Filename);
const DirectoryLookup *CurDir;
const FileEntry *File = PP.getHeaderSearchInfo().LookupFile(
Filename, SourceLocation(), isAngled, nullptr, CurDir,
PP.getSourceManager().getFileEntryForID(FileId), nullptr, nullptr,
nullptr, false);
FileExists = File != nullptr;
return true;
}
示例6: while
bool Parser::parseExpression (Lexer& lexer, Lookup& lookup, Int32u* slot, const Extractor** output)
{
BinaryOp binaryOp;
stack<BinaryOp> binaryOps;
stack<const Extractor*> operands;
const Extractor* value;
while (true)
{
switch (lexer.getType ())
{
case Lexer::PLUS:
lexer.next ();
if (!this->parseExpression (lexer, lookup, slot, &value))
return false;
break;
case Lexer::MINUS:
lexer.next ();
if (!this->parseExpression (lexer, lookup, slot, &value))
return false;
value = new NumberUnaryExtractor (value, [] (Float64 number)
{
return Variant (-number);
});
this->extractors.push_back (value);
break;
case Lexer::NOT:
lexer.next ();
if (!this->parseExpression (lexer, lookup, slot, &value))
return false;
value = new BooleanUnaryExtractor (value, [] (bool value)
{
return Variant (!value);
});
this->extractors.push_back (value);
break;
case Lexer::PARENTHESIS_BEGIN:
lexer.next ();
if (!this->parseExpression (lexer, lookup, slot, &value) ||
!this->parseType (lexer, Lexer::PARENTHESIS_END, "closing parenthesis"))
return false;
break;
default:
if (!this->parseValue (lexer, lookup, slot, &value))
return false;
break;
}
operands.push (value);
switch (lexer.getType ())
{
case Lexer::AMPERSAND:
binaryOp = make_pair (1, [] (const Extractor* lhs, const Extractor* rhs) -> Extractor*
{
return new AndLogicalExtractor (lhs, rhs);
});
break;
case Lexer::DIFFERENT:
binaryOp = make_pair (2, [] (const Extractor* lhs, const Extractor* rhs) -> Extractor*
{
return new VariantBinaryExtractor (lhs, rhs, [] (const Variant& a, const Variant& b)
{
return Variant (a != b);
});
});
break;
case Lexer::DIVIDE:
binaryOp = make_pair (4, [] (const Extractor* lhs, const Extractor* rhs) -> Extractor*
{
return new NumberBinaryExtractor (lhs, rhs, [] (Float64 a, Float64 b)
{
return b != 0 ? Variant (a / b) : Variant::empty;
});
});
break;
case Lexer::EQUAL:
//.........这里部分代码省略.........
示例7: main
// Driver: tokenize input (from the --evaluate string, the first --files
// argument, or stdin), parse it into an AST, and evaluate the tree.
// Returns 0 on success, -1 when errors occurred and --ignore-errors is
// not set.
int main(int argc, const char** argv) {
    auto params = getParams(argc, argv);
    auto exit_with_errors = [](int error_count) {
        cout << "Exiting with " << error_count << (error_count == 1 ? " error" : " errors") << endl;
    };
    Lexer lexer;
    vector<Token> tokens;
    int error_count = 0;
    if (paramIsSet(params, "evaluate")) {
        // Expression supplied directly on the command line.
        auto str = params["evaluate"][0];
        istringstream stream {str};
        tie(tokens, error_count) = lexer.tokenize(stream, "(command line)");
    } else if (paramIsSet(params, "files")) {
        auto filename = params["files"][0];
        // BUG FIX: was `ifstream file (unknown);` — `unknown` is not a
        // declared identifier; open the file the user asked for.
        ifstream file (filename);
        if (file.is_open()) {
            tie(tokens, error_count) = lexer.tokenize(file, filename);
        } else {
            ++error_count;
            cerr << "ERROR: " << filename << " not found" << endl;
        }
    } else {
        // Default: read the program from standard input.
        tie(tokens, error_count) = lexer.tokenize(cin, "(stdin)");
    }
    bool ignore_errors = paramIsSet(params, "ignore-errors");
    if (!ignore_errors && error_count > 0) {
        exit_with_errors(error_count);
        return -1;
    }
    if (tokens.empty()) {
        return 0;  // nothing to parse
    }
    if (paramIsSet(params, "print-tokens")) {
        print_tokens(tokens);
    }
    TokenStream token_stream {begin(tokens), end(tokens), true};
    Parser parser;
    shared_ptr<ASTNode> tree;
    setupGlobalScope();
    tie(tree, error_count) = parser.parse(token_stream);
    if (!ignore_errors && error_count > 0) {
        exit_with_errors(error_count);
        return -1;
    }
    bool print_ast = paramIsSet(params, "print-ast");
    if (tree) {
        if (print_ast) {
            cout << "\nOutput: " << endl;
            tree->output(cout, 0);
            cout << endl;
        }
        if (print_ast || ignore_errors) {
            cout << "\nEvaluate: " << endl;
        }
        try {
            // Evaluation errors are reported but do not change the
            // exit code (matches --ignore-errors semantics).
            auto eval = tree->evaluate();
            if (eval) {
                eval->output(cout);
            }
        } catch (const exception& ex) {
            ++error_count;
            cerr << ex.what() << endl;
        }
        cout << endl;
    } else if (print_ast) {
        cout << "No parse tree produced" << endl;
    }
    return 0;
}
示例8: current_time
// Load the on-disk converter-cache index ("<cache_dir>/index") into the
// in-memory cache map. Each entry consists of four fields: original
// file name, target format, timestamp and checksum. Stale or orphaned
// entries are pruned (and their cached files removed) as they are read.
void ConverterCache::Impl::readIndex()
{
time_t const now = current_time();
FileName const index(addName(cache_dir.absFileName(), "index"));
ifstream is(index.toFilesystemEncoding().c_str());
Lexer lex;
lex.setStream(is);
while (lex.isOK()) {
// Read the four fields; a short read means the index is exhausted
// (or truncated), so stop.
if (!lex.next(true))
break;
string const orig_from = lex.getString();
if (!lex.next())
break;
string const to_format = lex.getString();
if (!lex.next())
break;
time_t const timestamp =
convert<unsigned long>(lex.getString());
if (!lex.next())
break;
unsigned long const checksum =
convert<unsigned long>(lex.getString());
FileName const orig_from_name(orig_from);
CacheItem item(orig_from_name, to_format, timestamp, checksum);
// Don't cache files that do not exist anymore
if (!orig_from_name.exists()) {
LYXERR(Debug::FILES, "Not caching file `"
<< orig_from << "' (does not exist anymore).");
item.cache_name.removeFile();
continue;
}
// Don't add items that are not in the cache anymore
// This can happen if two instances of LyX are running
// at the same time and update the index file independently.
if (!item.cache_name.exists()) {
LYXERR(Debug::FILES, "Not caching file `" << orig_from
<< "' (cached copy does not exist anymore).");
continue;
}
// Delete the cached file if it is too old
if (difftime(now, item.cache_name.lastModified())
> lyxrc.converter_cache_maxage) {
LYXERR(Debug::FILES, "Not caching file `"
<< orig_from << "' (too old).");
item.cache_name.removeFile();
continue;
}
// Record the entry; detect the source format only the first time
// this file is seen.
FormatCache & format_cache = cache[orig_from_name];
if (format_cache.from_format.empty())
format_cache.from_format =
formats.getFormatFromFile(orig_from_name);
format_cache.cache[to_format] = item;
}
is.close();
}
示例9: lexer_ptr
// Construct a Parser bound to an externally owned lexer and immediately
// parse its tokens into `meta` (pre-sized from the lexer's token count).
// NOTE(review): lexer_ptr is left null while `lexer` holds the
// reference — presumably the pointer member is only used by other,
// owning constructors; confirm against the class definition.
Parser::Parser(Lexer &m_lexer): lexer_ptr(nullptr), lexer( m_lexer ), meta(m_lexer.size())
{
parseToken( lexer, meta );
}
示例10: is
// Set this Font's properties from their string serialisation. Each
// property is a "<name> <value>" pair; parsing stops at the first
// unknown name. `toggle` receives the "toggleall" flag if present.
// Returns true when at least one property was read.
bool Font::fromString(string const & data, bool & toggle)
{
	istringstream is(data);
	Lexer lex;
	lex.setStream(is);
	int parsed = 0;
	while (lex.isOK()) {
		// Read the property name, then require a value token.
		string key;
		if (lex.next())
			key = lex.getString();
		if (key.empty() || !lex.next())
			break;
		if (key == "family") {
			bits_.setFamily(FontFamily(lex.getInteger()));
		} else if (key == "series") {
			bits_.setSeries(FontSeries(lex.getInteger()));
		} else if (key == "shape") {
			bits_.setShape(FontShape(lex.getInteger()));
		} else if (key == "size") {
			bits_.setSize(FontSize(lex.getInteger()));
		} else if (key == "emph" || key == "underbar"
			   || key == "noun" || key == "number"
			   || key == "uuline" || key == "uwave"
			   || key == "strikeout") {
			// All FontState-valued properties share one decode step.
			FontState const state = FontState(lex.getInteger());
			if (key == "emph")
				bits_.setEmph(state);
			else if (key == "underbar")
				bits_.setUnderbar(state);
			else if (key == "strikeout")
				bits_.setStrikeout(state);
			else if (key == "uuline")
				bits_.setUuline(state);
			else if (key == "uwave")
				bits_.setUwave(state);
			else if (key == "noun")
				bits_.setNoun(state);
			else if (key == "number")
				bits_.setNumber(state);
		} else if (key == "color") {
			bits_.setColor(ColorCode(lex.getInteger()));
		/**
		} else if (key == "background") {
			bits_.setBackground(ColorCode(lex.getInteger()));
		*/
		} else if (key == "language") {
			setLanguage(languages.getLanguage(lex.getString()));
		} else if (key == "toggleall") {
			toggle = lex.getBool();
		} else {
			// Unrecognised property name: stop parsing.
			break;
		}
		++parsed;
	}
	return parsed > 0;
}
示例11: Idents
// Bind this action module to the lexer's identifier table and allocate
// the table used to track which identifiers name types.
// NOTE(review): TypeNameInfoTablePtr receives a raw owning `new` here —
// confirm the destructor deletes it (a smart pointer would be safer if
// the member's type allows).
MinimalAction::MinimalAction(Lexer &l)
: Idents(l.getIdentifierTable()) {
TypeNameInfoTablePtr = new TypeNameInfoTable();
}
示例12: ReadMesh
/*
================
ReadMesh
================
*/
static void ReadMesh( Lexer &lexer, aseMesh *inMesh ) {
int idx;
Vec3 temp;
lexer.ExpectToken("{");
const char *p;
const Token *token;
while ( (token = lexer.ReadToken()) != OG_NULL ) {
p = token->GetString();
if ( !p || !*p )
continue;
if ( String::Icmp( p, "}" ) == 0 )
return;
if ( String::Icmp( p, "*" ) != 0 )
lexer.Error( Format("expected *, got '$*'") << p );
if ( lexer.CheckToken( "MESH_NUMVERTEX" ) ) {
inMesh->numVerts = lexer.ReadInt();
inMesh->vertices = new aseVertex[inMesh->numVerts];
}
else if ( lexer.CheckToken( "MESH_NUMFACES" ) ) {
inMesh->numTris = lexer.ReadInt();
inMesh->triangles = new aseTriangle[inMesh->numTris];
}
else if ( lexer.CheckToken( "MESH_VERTEX_LIST" ) ) {
lexer.ExpectToken("{");
for( int i=0; i<inMesh->numVerts; i++ ) {
lexer.ExpectToken("*");
lexer.ExpectToken("MESH_VERTEX");
idx = lexer.ReadInt();
inMesh->vertices[idx].origin.x = lexer.ReadFloat() * ASE_MODEL_SCALE;
inMesh->vertices[idx].origin.y = lexer.ReadFloat() * ASE_MODEL_SCALE;
inMesh->vertices[idx].origin.z = lexer.ReadFloat() * ASE_MODEL_SCALE;
}
lexer.ExpectToken("}");
}
else if ( lexer.CheckToken( "MESH_NORMALS" ) ) {
lexer.ExpectToken("{");
for( ;; ) {
if ( lexer.CheckToken("}") )
break;
lexer.ExpectToken("*");
// don't need the face normal
if ( lexer.CheckToken("MESH_FACENORMAL") ) {
lexer.GotoNextLine();
continue;
}
lexer.ExpectToken("MESH_VERTEXNORMAL");
idx = lexer.ReadInt();
inMesh->vertices[idx].normal.x = lexer.ReadFloat();
inMesh->vertices[idx].normal.y = lexer.ReadFloat();
inMesh->vertices[idx].normal.z = lexer.ReadFloat();
}
}
else if ( lexer.CheckToken( "MESH_FACE_LIST" ) ) {
lexer.ExpectToken("{");
for( int i=0; i<inMesh->numTris; i++ ) {
lexer.ExpectToken("*");
lexer.ExpectToken("MESH_FACE");
idx = lexer.ReadInt();
lexer.CheckToken(":"); // might or might not be there
lexer.ExpectToken("A");
lexer.ExpectToken(":");
inMesh->triangles[idx].v[0] = lexer.ReadInt();
lexer.ExpectToken("B");
lexer.ExpectToken(":");
inMesh->triangles[idx].v[1] = lexer.ReadInt();
lexer.ExpectToken("C");
lexer.ExpectToken(":");
inMesh->triangles[idx].v[2] = lexer.ReadInt();
lexer.GotoNextLine();
}
lexer.ExpectToken("}");
}
else if ( lexer.CheckToken( "MESH_NUMTVERTEX" ) ) {
inMesh->numTVerts = lexer.ReadInt();
inMesh->texCoords = new Vec2[inMesh->numTVerts];
}
else if ( lexer.CheckToken( "MESH_TVERTLIST" ) ) {
lexer.ExpectToken("{");
for( int i=0; i<inMesh->numTVerts; i++ ) {
lexer.ExpectToken("*");
lexer.ExpectToken("MESH_TVERT");
idx = lexer.ReadInt();
inMesh->texCoords[idx].x = lexer.ReadFloat();
inMesh->texCoords[idx].y = 1.0f-lexer.ReadFloat();
lexer.ReadFloat();// don't need 3rd component
}
lexer.ExpectToken("}");
}
else if ( lexer.CheckToken( "MESH_NUMTVFACES" ) )
lexer.ExpectToken( Format() << inMesh->numTris );
else if ( lexer.CheckToken( "MESH_TFACELIST" ) ) {
lexer.ExpectToken("{");
for( int i=0; i<inMesh->numTris; i++ ) {
lexer.ExpectToken("*");
//.........这里部分代码省略.........
示例13: get_root_node
// Pull the next token from the lexer and convert it into the root
// expression node of the tree being built.
inline expr_ptr get_root_node( Lexer & lex )
{
return convert_token_to_expression( lex.get_token() );
}
示例14: while
bool InsetLayout::read(Lexer & lex, TextClass const & tclass)
{
enum {
IL_BGCOLOR,
IL_CONTENTASLABEL,
IL_COPYSTYLE,
IL_COUNTER,
IL_CUSTOMPARS,
IL_DECORATION,
IL_FONT,
IL_FORCELTR,
IL_FORCEPLAIN,
IL_FREESPACING,
IL_HTMLTAG,
IL_HTMLATTR,
IL_HTMLFORCECSS,
IL_HTMLINNERTAG,
IL_HTMLINNERATTR,
IL_HTMLISBLOCK,
IL_HTMLLABEL,
IL_HTMLSTYLE,
IL_HTMLPREAMBLE,
IL_INTOC,
IL_LABELFONT,
IL_LABELSTRING,
IL_LATEXNAME,
IL_LATEXPARAM,
IL_LATEXTYPE,
IL_LYXTYPE,
IL_KEEPEMPTY,
IL_MULTIPAR,
IL_NEEDPROTECT,
IL_PASSTHRU,
IL_PARBREAKISNEWLINE,
IL_PREAMBLE,
IL_REQUIRES,
IL_SPELLCHECK,
IL_REFPREFIX,
IL_END
};
LexerKeyword elementTags[] = {
{ "bgcolor", IL_BGCOLOR },
{ "contentaslabel", IL_CONTENTASLABEL },
{ "copystyle", IL_COPYSTYLE },
{ "counter", IL_COUNTER},
{ "custompars", IL_CUSTOMPARS },
{ "decoration", IL_DECORATION },
{ "end", IL_END },
{ "font", IL_FONT },
{ "forceltr", IL_FORCELTR },
{ "forceplain", IL_FORCEPLAIN },
{ "freespacing", IL_FREESPACING },
{ "htmlattr", IL_HTMLATTR },
{ "htmlforcecss", IL_HTMLFORCECSS },
{ "htmlinnerattr", IL_HTMLINNERATTR},
{ "htmlinnertag", IL_HTMLINNERTAG},
{ "htmlisblock", IL_HTMLISBLOCK},
{ "htmllabel", IL_HTMLLABEL },
{ "htmlpreamble", IL_HTMLPREAMBLE },
{ "htmlstyle", IL_HTMLSTYLE },
{ "htmltag", IL_HTMLTAG },
{ "intoc", IL_INTOC },
{ "keepempty", IL_KEEPEMPTY },
{ "labelfont", IL_LABELFONT },
{ "labelstring", IL_LABELSTRING },
{ "latexname", IL_LATEXNAME },
{ "latexparam", IL_LATEXPARAM },
{ "latextype", IL_LATEXTYPE },
{ "lyxtype", IL_LYXTYPE },
{ "multipar", IL_MULTIPAR },
{ "needprotect", IL_NEEDPROTECT },
{ "parbreakisnewline", IL_PARBREAKISNEWLINE },
{ "passthru", IL_PASSTHRU },
{ "preamble", IL_PREAMBLE },
{ "refprefix", IL_REFPREFIX },
{ "requires", IL_REQUIRES },
{ "spellcheck", IL_SPELLCHECK }
};
lex.pushTable(elementTags);
FontInfo font = inherit_font;
labelfont_ = inherit_font;
bgcolor_ = Color_none;
bool getout = false;
// whether we've read the CustomPars or ForcePlain tag
// for issuing a warning in case MultiPars comes later
bool readCustomOrPlain = false;
string tmp;
while (!getout && lex.isOK()) {
int le = lex.lex();
switch (le) {
case Lexer::LEX_UNDEF:
lex.printError("Unknown InsetLayout tag");
continue;
default:
break;
//.........这里部分代码省略.........
示例15: throw
long double Calculation::Resolve(const string& str) const
throw(invalid_argument)
{
if (!CheckParenthesis(str))
throw invalid_argument("The number of left parenthesis is not equal with"
" the number of right parenthesis");
/*
* TODO
* To test special cases.
* 5 + * 2
* 5 + 2 *
* * 5 + 2
*
* 5 (2 + 3)
* (2 + 3) 5
*
* 5 2 * 3
* 5 sin(90)
*
* sin(90) cos(90)
* sin 90 cos(45)
*
* To test if there are the functions that are called.
*/
Lexer l;
list<Token*> tokens = l.Split(str);
long double r;
if (l.PrintErrors())
{
throw invalid_argument("Invalid input");
}
if (tokens.size() == 0)
{
throw invalid_argument("No tokens");
}
AddZero(tokens);
try
{
r = Resolve(tokens);
for (auto it = tokens.begin(); it != tokens.end(); ++it)
delete (*it);
return r;
}
catch (invalid_argument& e)
{
for (auto it = tokens.begin(); it != tokens.end(); ++it)
delete (*it);
throw e;
}
}