This article collects typical usage examples of the C++ Tokenizer::isComplete method. If you are wondering how to use C++ Tokenizer::isComplete, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples of the containing class, Tokenizer.
The following presents 8 code examples of the Tokenizer::isComplete method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
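Before the full examples, here is a minimal sketch of the typical isComplete() loop: setString() hands the tokenizer one line, and getNextToken() is called repeatedly until isComplete() reports that the line has been consumed. The sketch assumes the Tokenizer, TokenList, and Token classes used by the examples below are available in the same project; "input.cpp" is a placeholder file name.

#include <fstream>
#include <iostream>
#include <string>
using namespace std;

//Minimal sketch (not one of the collected examples): tokenize a file line by line
//with Tokenizer::isComplete. Assumes the Tokenizer, TokenList, and Token classes
//from the examples below; "input.cpp" is a placeholder file name.
int main() {
    ifstream sourceFile("input.cpp");
    if (!sourceFile.is_open()) {
        cout << "Failed to open file" << endl;
        return 1;
    }
    TokenList tokens;
    Tokenizer tokenizer;
    string line;
    while (getline(sourceFile, line)) {
        tokenizer.setString(&line);           //hand the current line to the tokenizer
        while (!tokenizer.isComplete()) {     //keep pulling tokens until the line is consumed
            tokens.append(tokenizer.getNextToken());
        }
    }
    //Print the collected tokens, space-separated
    for (Token *t = tokens.getFirst(); t; t = t->getNext()) {
        cout << t->getStringRep() << " ";
    }
    cout << endl;
    return 0;
}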
Example 1: main
//Example Test code for interacting with your Token, TokenList, and Tokenizer classes
//Add your own code to further test the operation of your Token, TokenList, and Tokenizer classes
int main() {
    ifstream sourceFile;
    TokenList tokens;
    Tokenizer tokenizer;

    //Read in a file line-by-line and tokenize each line
    sourceFile.open("test.cpp");
    if (!sourceFile.is_open()) {
        cout << "Failed to open file" << endl;
        return 1;
    }
    while (!sourceFile.eof()) {
        string lineA, lineB;
        getline(sourceFile, lineA);
        //while the current line ends with a line-continuation \ append the next line to the current line
        while (lineA.length() > 0 && lineA[lineA.length() - 1] == '\\') {
            lineA.erase(lineA.length() - 1, 1);
            getline(sourceFile, lineB);
            lineA += lineB;
        }
        tokenizer.setString(&lineA);
        while (!tokenizer.isComplete()) {
            tokens.append(tokenizer.getNextToken());
        }
        //Re-insert newline that was removed by the getline function
        tokens.append("\n");
    }

    cout << "Inline comments removed: " << removeInlineComments(tokens) << endl;
    cout << "Block comments removed: " << removeBlockComments(tokens) << endl;

    /*Test your tokenization of the file by traversing the tokens list and printing out the tokens*/
    Token *t = tokens.getFirst();
    // while (t) {
    //     cout << t->getStringRep() << endl;
    //     t = t->getNext();
    // }
    for (int i = 0; t; i++) {
        cout << t->getStringRep() << " ";
        t = t->getNext();
    }
    return 0;
}
Example 2: main
//Example Test code for interacting with your Token, TokenList, and Tokenizer classes
//Add your own code to further test the operation of your Token, TokenList, and Tokenizer classes
int main() {
    ifstream sourceFile;
    TokenList tokens;
    Tokenizer tokenizer;

    // Set the default function detectors to the standard types plus the const, short, long, signed, and unsigned qualifiers
    tokens.tableOfFunctionDetectors[0] = ")";
    for (int i = 1; i <= 11; i++) {
        tokens.tableOfFunctionDetectors[i] = ensc251::tableOfKeywords[i];
    }
    tokens.tableOfFunctionDetectors[11] = "string";
    int typeindex = 12;

    //Read in a file line-by-line and tokenize each line
    sourceFile.open("test.cpp");
    if (!sourceFile.is_open()) {
        cout << "Failed to open file" << endl;
        return 1;
    }
    while (!sourceFile.eof()) {
        string lineA, lineB;
        getline(sourceFile, lineA);
        //while the current line ends with a line-continuation \ append the next line to the current line
        while (lineA.length() > 0 && lineA[lineA.length() - 1] == '\\') {
            lineA.erase(lineA.length() - 1, 1);
            getline(sourceFile, lineB);
            lineA += lineB;
        }
        tokenizer.setString(&lineA);
        while (!tokenizer.isComplete()) {
            tokens.append(tokenizer.getNextToken());
        }
        //Re-insert newline that was removed by the getline function
        tokens.append("\n");
    }

    //comment these out to keep comments in the code
    cout << "Inline comments removed: " << removeInlineComments(tokens) << endl;
    cout << "Block comments removed: " << removeBlockComments(tokens) << endl;

    /*Test your tokenization of the file by traversing the tokens list and printing out the tokens*/
    Token *t = tokens.getFirst();
    cout << "************************* Tokens Printed With Types *************************" << endl << endl << endl;
    while (t) {
        cout << t->getStringRep() << " ";
        if (t->getStringRep() != "\n") {
            printType(t->getStringType()); //--------comment out this line to remove token types after each token--------------
            cout << " ";
        }
        t = t->getNext();
        numTokensParsed++; //Counts how many tokens there are in the whole cpp file
    }

    //Test your assignment statements (prints assignment statements at the end)
    cout << endl << endl << "*************************** Assignment Statements ***************************" << endl << endl << endl;
    Token *aListPtr = getAssignmentStatements(tokens);
    classifyIdentifiers(tokens);
    while (aListPtr) {
        cout << aListPtr->getStringRep() << " " << aListPtr->getIdentifierType();
        if (aListPtr->getStringRep() == ";") { //separate assignment statements by new lines
            cout << endl;
        }
        aListPtr = aListPtr->getNext();
    }

    cout << endl << endl << "*************************** Function Declarations ***************************" << endl << endl << endl;
    Token *FuncListPtr = getFunctionDeclarations(tokens);
    while (FuncListPtr) {
        cout << FuncListPtr->getStringRep() << " ";
        if (FuncListPtr->getStringRep() == ";") { //separate function declarations by new lines
            cout << endl;
        }
        FuncListPtr = FuncListPtr->getNext();
    }

    cout << endl << endl << "********************************** Errors ***********************************" << endl << endl;
    cout << "There are: " << typeMismatchChecker(assigstats) << " type mismatch error(s).\n";
    cout << "There are: " << bracketMismatchChecker(assigstats) << " bracket mismatch error(s).\n";

    int choice = 0;
    cout << endl << endl << endl << endl << "********************* Welcome to Amar and Cem's Parser **********************" << endl;
    cout << "To begin, choose a mode: " << endl << "(1) Non-Verbose" << endl << "(2) Verbose" << endl << endl;
    cout << "Choice: ";
    while (!(cin >> choice) || (choice < 1 || choice > 2)) {
        //if an invalid character is read in
        cin.clear();
        cin.ignore(numeric_limits<streamsize>::max(), '\n');
        cout << "Invalid entry; please try again: " << endl << "Choice: ";
    }
//......... part of the code is omitted here .........
Example 3: main
//Example Test code for interacting with your Token, TokenList, and Tokenizer classes
//Add your own code to further test the operation of your Token, TokenList, and Tokenizer classes
int main(int argc, char *argv[]) {
    ifstream sourceFile;
    TokenList tokens;
    //Lists for types of tokens
    TokenList operatorTokens;
    TokenList identifierTokens;
    TokenList literalTokens;
    TokenList commentBodyTokens;
    TokenList otherTokens;
    Tokenizer tokenizer;

    //Read in a file line-by-line and tokenize each line
    cout << "File: " << argv[1] << endl;
    sourceFile.open(argv[1]);
    if (!sourceFile.is_open()) {
        cout << "Failed to open file" << endl;
        return 1;
    }
    while (!sourceFile.eof()) {
        string line;
        getline(sourceFile, line);
        tokenizer.setString(&line);
        while (!tokenizer.isComplete()) {
            tokens.append(tokenizer.getNextToken());
        }
    }

    // Set token data
    Token *t = tokens.getFirst();
    while (t) {
        tokens.findAndSetTokenDetails(t);
        t = t->getNext();
    }

    // Print a list of tokens and all data
    tokens.print(20);

    TokenList *conditionalTokens = findAllConditionalExpressions(tokens);
    cout << "\n\nConditional tokens list: \n";
    conditionalTokens->print(20);
    //tokens.print(20);

    removeTokensOfType(tokens, T_Other);
    cout << "No Others: \n";
    tokens.print(20);

    /* For your testing purposes only */
    /* Ensure that tokens have all type information set */
    /* Create operator, identifier, literal, etc. token lists from the master list of tokens */
    return 0;
}
Example 4: main
//Example Test code for interacting with your Token, TokenList, and Tokenizer classes
//Add your own code to further test the operation of your Token, TokenList, and Tokenizer classes
int main() {
    ifstream sourceFile;
    TokenList tokens;
    //Lists for types of tokens
    TokenList operatorTokens;
    TokenList identifierTokens;
    TokenList literalTokens;
    TokenList commentBodyTokens;
    TokenList otherTokens;
    TokenList keywordTokens;
    Tokenizer tokenizer;

    //Read in a file line-by-line and tokenize each line
    sourceFile.open("/Users/arlene/Desktop/251Final/251Final/test.vhd");
    if (!sourceFile.is_open()) {
        cout << "Failed to open file" << endl;
        return 1;
    }
    while (!sourceFile.eof()) {
        string line;
        getline(sourceFile, line);
        tokenizer.setString(&line);
        while (!tokenizer.isComplete()) {
            tokens.append(tokenizer.getNextToken());
        }
    }

    /*Test your tokenization of the file by traversing the tokens list and printing out the tokens*/
    Token *t = tokens.getFirst();
    while (t) {
        cout << t->getStringRep() << " ";
        t = t->getNext();
    }

    //--------------------------------------test code for part 1-------------------------------------------
    /*Token *t = tokens.getFirst();
    while (t) {
        tokens.findAndSetTokenDetails(t);
        if (t->isOperator()) {
            Token *temp = new Token(*t);
            temp->setNext(nullptr);
            operatorTokens.append(temp);
        }
        if (t->isLiteral()) {
            Token *temp = new Token(*t);
            temp->setNext(nullptr);
            literalTokens.append(temp);
        }
        if (t->isKeyword()) {
            Token *temp = new Token(*t);
            temp->setNext(nullptr);
            keywordTokens.append(temp);
        }
        if (t->isComment()) {
            Token *temp = new Token(*t);
            temp->setNext(nullptr);
            commentBodyTokens.append(temp);
        }
        if (t->isIdentifier()) {
            Token *temp = new Token(*t);
            temp->setNext(nullptr);
            identifierTokens.append(temp);
        }
        if (t->isOther()) {
            Token *temp = new Token(*t);
            temp->setNext(nullptr);
            otherTokens.append(temp);
        }
        t = t->getNext();
    }
    t = identifierTokens.getFirst();
    while (t) {
        cout << t->getStringRep() << " ";
        if (t->getTokenDetails() != nullptr) {
            cout << t->getTokenDetails()->width << " ";
            cout << t->getTokenDetails()->type << " " << endl;
        }
        t = t->getNext();
    }*/

    //---------------------------------------test remove Token Type-----------------------------------------
    // cout << removeTokensOfType(tokens, T_CommentBody) << endl;

    //---------------------------------------test removeComments--------------------------------------------
    /*removeComments(tokens);
    t = tokens.getFirst();
    while (t) {
        cout << t->getStringRep() << " ";
        t = t->getNext();
    }*/
//......... part of the code is omitted here .........
Example 5: main
//Example Test code for interacting with your Token, TokenList, and Tokenizer classes
//Add your own code to further test the operation of your Token, TokenList, and Tokenizer classes
int main() {
    ifstream sourceFile;
    TokenList tokens;
    Tokenizer tokenizer;

    //Read in a file line-by-line and tokenize each line
    sourceFile.open("test.cpp");
    if (!sourceFile.is_open()) {
        cout << "Failed to open file" << endl;
        return 1;
    }
    while (!sourceFile.eof()) {
        string lineA, lineB;
        getline(sourceFile, lineA);
        //while the current line ends with a line-continuation \ append the next line to the current line
        while (lineA.length() > 0 && lineA[lineA.length() - 1] == '\\') {
            lineA.erase(lineA.length() - 1, 1);
            getline(sourceFile, lineB);
            lineA += lineB;
        }
        tokenizer.setString(&lineA);
        while (!tokenizer.isComplete()) {
            tokens.append(tokenizer.getNextToken());
        }
        //Re-insert newline that was removed by the getline function
        tokens.append("\n");
    }

    removeInlineComments(tokens);
    removeBlockComments(tokens);

    //ask the user which mode they want
    char userResponse;
    cout << "Input the number corresponding to the type you want to run" << endl;
    cout << "Non-Verbose = 1" << endl << "Verbose = 2" << endl << "Exit = 3" << endl;
    cin >> userResponse;

    //enter non-verbose mode
    if (userResponse == '1') {
        //Number of assignment statements
        ensc251::setsUserDataTypes(tokens.getFirst());
        ensc251::setDataTypeList(tokens.getFirst());
        Token *aListPtr = getAssignmentStatements(tokens);
        Token *tempAList = aListPtr;
        int numOfAStatements = 0;
        while (tempAList) {
            if (tempAList->getStringRep() == ";") {
                numOfAStatements++;
            }
            tempAList = tempAList->getNext();
        }

        //prints out the assignment statements and their associated errors
        cout << "---------------NON VERBOSE OUTPUT----------------------" << endl;
        cout << "The number of assignment statements: " << numOfAStatements << endl;
        cout << "The number of assignment statements with unmatched types: " << dataTypes::numberOfUnmatchedTypes(aListPtr) << endl;
        cout << "The number of assignment statements with unmatched braces: " << dataTypes::numberOfUnmatchedBraces(aListPtr) << endl;

        //number of function declarations
        Token *fDec = dataTypes::setFunctionDeclarations(tokens.getFirst());
        Token *tempFDec = fDec;
        int numOfFDecs = 0;
        while (tempFDec) {
            if (tempFDec->getStringRep() == ";") {
                numOfFDecs++;
            }
            tempFDec = tempFDec->getNext();
        }
        cout << "The number of function declarations: " << numOfFDecs << endl;

        //total number of tokens in the program
        Token *tcount = tokens.getFirst();
        int numOftokens = 0;
        while (tcount) {
            numOftokens++;
            tcount = tcount->getNext();
        }
        cout << "The number of tokens: " << numOftokens << endl;

        //total number of token types in the program
        Token *tokentype = tokens.getFirst();
        int lexicaltypecount[9] = {};
        while (tokentype) {
            using namespace ensc251;
            //0
            if (tokentype->getStringType() == T_Identifier) {
                lexicaltypecount[0]++;
            }
            //1
            if (tokentype->getStringType() == T_Operator) {
                lexicaltypecount[1]++;
//......... part of the code is omitted here .........
Example 6: main
int main() {
    ifstream sourceFile;
    TokenList tokens;
    //Lists for types of tokens
    TokenList operatorTokens;
    TokenList identifierTokens;
    TokenList literalTokens;
    TokenList commentBodyTokens;
    TokenList otherTokens;
    Tokenizer tokenizer;

    bool verboseModeFlag = false;
    string userInput;
    char parsedUserInput;
    vector<string> errorLines;
    string errorMissingEndIf = "Missing \"End If\" here: ";
    string errorMissingThen = "Missing \"Then\" here: ";
    string errorMissingIf = "Extra \"End If\" Here: ";
    string errorExtraThen = "Extra \"Then\" here: ";
    string errorTypeMismatch = "Type Mismatch of types : ";
    string errorWidthMismatch = "Width Mismatch: ";
    int numberOfTokens = 0;
    int numberOfCondExp = 0;
    int numberOfMissingEndIfs = 0;
    int numberOfMissingIfs = 0;
    int numberofMissingThens = 0;
    int numberofMissingProcess = 0;
    int numberofMissingEndProcess = 0;
    int numberofMissingOpenBracket = 0;
    int numberofMissingCloseBracket = 0;
    int ifEndifBalance = 0;   //a positive number means too many ifs, a negative number too many endifs
    int ifthenBalance = 0;    //like above, except with Then
    int processBalance = 0;   //like above, except with process - end process
    int BracketBalance = 0;   //checks for missing brackets

    //Read in a file line-by-line and tokenize each line
    cout << "Enter the name of the file to open: ";
    cin >> userInput;
    sourceFile.open(userInput);
    if (!sourceFile.is_open()) {
        cout << "Failed to open file" << endl;
        return 1;
    }
    while (!sourceFile.eof()) {
        string line;
        getline(sourceFile, line);
        tokenizer.setString(&line);
        while (!tokenizer.isComplete()) {
            tokens.append(tokenizer.getNextToken());
        }
    }

    ///removeComments(tokens); ///test removeComments
    Token *t = tokens.getFirst();
    while (t) {
        tokens.findAndSetTokenDetails(t); ///test findAndSetTokenDetails
        /*if (t->getTokenType() == 1 || t->getTokenType() == 2) {
            detailtoken = *(t->getTokenDetails());
            //cout << t->getStringRep() << " Token Type: " << t->getTokenType() << " Token Detail: " << detailtoken.type << " Token Width: " << detailtoken.width << endl;
            t = t->getNext();
        } else {
            //cout << t->getStringRep() << " Token Type: " << t->getTokenType() << endl;
            t = t->getNext();
        }*/
        t = t->getNext();
    }

    cout << "Enter Verbose Mode? Type \"1\" for Yes, Other inputs will be a No. : ";
    cin >> userInput;
    parsedUserInput = userInput[0];
    if (parsedUserInput == '1') {
        verboseModeFlag = true;
    }

    //This part counts the number of tokens.
    t = tokens.getFirst();
    while (t) {
        numberOfTokens++;
        t = t->getNext();
    }

    //This part counts the number of conditional expressions.
//......... part of the code is omitted here .........
Example 7: main
//Example Test code for interacting with your Token, TokenList, and Tokenizer classes
//Add your own code to further test the operation of your Token, TokenList, and Tokenizer classes
int main() {
    ifstream sourceFile;
    TokenList tokens;
    Tokenizer tokenizer;

    //Read in a file line-by-line and tokenize each line
    sourceFile.open("test.vhd");
    if (!sourceFile.is_open()) {
        cout << "Failed to open file" << endl;
        return 1;
    }
    while (!sourceFile.eof()) {
        string lineA, lineB;
        getline(sourceFile, lineA);
        //while the current line ends with a line-continuation \ append the next line to the current line
        while (lineA.length() > 0 && lineA[lineA.length() - 1] == '\\') {
            lineA.erase(lineA.length() - 1, 1);
            getline(sourceFile, lineB);
            lineA += lineB;
        }
        tokenizer.setString(&lineA);
        while (!tokenizer.isComplete()) {
            tokens.append(tokenizer.getNextToken());
        }
        //Re-insert newline that was removed by the getline function
        tokens.append("\n");
    }

    //Test your tokenization of the file by traversing the tokens list and printing out the tokens
    Token *t = tokens.getFirst();
    while (t) {
        cout << t->getStringRep();
        //t->setTokenDetails("std_logic", 0);
        tokens.findAndSetTokenDetails(t);
        cout << "type Of token: " << t->getTokenType() << "\n";
        if (t->isIdentifier() || t->isLiteral()) {
            if (t->getTokenDetails() != nullptr) {
                cout << "size of width: " << t->getTokenDetails()->width << "\n";
                cout << "string type: " << t->getTokenDetails()->type << "\n";
            }
        }
        cout << "\n-------------------------------\n";
        //cout << "one iteration";
        t = t->getNext();
    }

    cout << "\n\nremove comments starts here: \n";
    removeComments(tokens);
    //findAllConditionalExpressions(tokens);
    //removeTokensOfType(tokens, T_Operator);
    t = tokens.getFirst();
    while (t) {
        cout << "|" << t->getStringRep();
        //cout << "one iteration";
        t = t->getNext();
    }
    /*
    cout << "\n\n\n";
    //removeComments(tokens);
    while (t) {
        //cout << t->getStringRep() << " ";
        //cout << "one iteration";
        t = t->getNext();
    }*/
}
Example 8: main
//Example Test code for interacting with your Token, TokenList, and Tokenizer classes
//Add your own code to further test the operation of your Token, TokenList, and Tokenizer classes
int main() {
    ifstream sourceFile;
    TokenList tokens;
    TokenList lines;
    cout << "run" << endl;
    //Lists for types of tokens
    TokenList operatorTokens;
    TokenList identifierTokens;
    TokenList literalTokens;
    TokenList commentBodyTokens;
    TokenList otherTokens;
    Tokenizer tokenizer;
    int Num = 0;
    string Result;

    //Read in a file line-by-line and tokenize each line
    sourceFile.open("test.vhd");
    if (!sourceFile.is_open()) {
        cout << "Failed to open file" << endl;
        return 1;
    }
    while (!sourceFile.eof()) {
        string line;
        getline(sourceFile, line);
        Num++;
        ostringstream convert;
        convert << Num;
        Result = convert.str();
        tokenizer.setString(&line);
        while (!tokenizer.isComplete()) {
            lines.append(Result);
            tokens.append(tokenizer.getNextToken());
        }
    }

    /*Test your tokenization of the file by traversing the tokens list and printing out the tokens*/
    cout << "[--------------------MASTER LIST----------------------]" << endl;
    Token *m = tokens.getFirst();
    Token *m2 = lines.getFirst();
    int Num_tokens = 1;
    while (m && m2) {
        cout << Num_tokens << " [" << m2->getStringRep() << "] ";
        cout << " [" << m->getStringRep() << "] " << endl;
        m = m->getNext();
        m2 = m2->getNext();
        Num_tokens++;
    }
    cout << endl;

    /* For your testing purposes only */
    cout << "[--------------------- findAndSetTokenDetails -----------------------]" << endl;
    string temp1;
    int wide = 0;
    Token *w = tokens.getFirst();
    while (w) {
        tokens.findAndSetTokenDetails(w);
        string type;
        if (w->getTokenType() == 0) {
            type = "ID";
            identifierTokens.append(w->getStringRep());
        }
        else if (w->getTokenType() == 1) {
            type = "Op";
            operatorTokens.append(w->getStringRep());
        }
        else if (w->getTokenType() == 2) {
            type = "Lt";
            literalTokens.append(w->getStringRep());
        }
        else if (w->getTokenType() == 3) {
            type = "Com";
            commentBodyTokens.append(w->getStringRep());
        }
        else if (w->getTokenType() == 4) {
//......... part of the code is omitted here .........