This article collects typical usage examples of the C++ method parser::DefTokeniser::nextToken. If you are unsure what DefTokeniser::nextToken does, or how and when to call it, the curated examples below should help; you can also look further into the containing class, parser::DefTokeniser, for more usage information.
A total of 14 code examples of the DefTokeniser::nextToken method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code samples.
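Before the individual examples, here is a minimal orientation sketch (not one of the 14 examples; the "value <from> [to <to>]" block format is invented for illustration). The DefTokeniser calls used here — assertNextToken, hasMoreTokens, nextToken, peek and skipTokens — are the same ones that appear in the examples below.
// Minimal usage sketch (hypothetical block format): read a brace-delimited
// block containing made-up "value <from> [to <to>]" entries.
void parseExampleBlock(parser::DefTokeniser& tok)
{
    tok.assertNextToken("{"); // throws parser::ParseException on a mismatch

    while (tok.hasMoreTokens())
    {
        std::string token = tok.nextToken(); // pull the next raw token

        if (token == "}")
        {
            break; // end of the block
        }
        else if (token == "value") // hypothetical keyword
        {
            std::string from = tok.nextToken();

            if (tok.peek() == "to") // look ahead without consuming
            {
                tok.skipTokens(1); // discard the "to"
                std::string to = tok.nextToken();
                // ... use from and to ...
            }
            // ... otherwise use from only ...
        }
    }
}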
Example 1: parseFromTokens
void ParticleDef::parseFromTokens(parser::DefTokeniser& tok)
{
    // Clear out the particle def (except the name) before parsing
    clear();

    // Any global keywords will come first, after which we get a series of
    // brace-delimited stages.
    std::string token = tok.nextToken();

    while (token != "}")
    {
        if (token == "depthHack")
        {
            setDepthHack(string::convert<float>(tok.nextToken()));
        }
        else if (token == "{")
        {
            // Construct/Parse the stage from the tokens
            StageDefPtr stage = make_shared<StageDef>(ref(tok));

            // Append to the ParticleDef
            appendStage(stage);
        }

        // Get next token
        token = tok.nextToken();
    }

    _changedSignal.emit();
}
Example 2: jumpOutOfBrackets
void XData::jumpOutOfBrackets(parser::DefTokeniser& tok, int currentDepth) const
{
    while (tok.hasMoreTokens() && currentDepth > 0)
    {
        std::string token = tok.nextToken();

        if (token == "{")
            currentDepth += 1;
        else if (token == "}")
            currentDepth -= 1;
    }
}
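A hedged usage sketch of the helper above (the call site itself is hypothetical): once the opening brace of an unwanted block has been consumed, the remaining tokens up to and including the matching closing brace can be discarded like this.
// Hypothetical call site: skip over a nested block we do not want to parse.
tok.assertNextToken("{");  // we are now one level inside the block
jumpOutOfBrackets(tok, 1); // consumes tokens until the matching "}" has been eaten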
Example 3: parseParticleDef
// Parse a single particle def
void ParticlesManager::parseParticleDef(parser::DefTokeniser& tok, const std::string& filename)
{
    // Standard DEF, starts with "particle <name> {"
    std::string declName = tok.nextToken();

    // Check for a valid particle declaration, some .prt files contain materials
    if (declName != "particle")
    {
        // No particle, skip name plus whole block
        tok.skipTokens(1);
        tok.assertNextToken("{");

        for (std::size_t level = 1; level > 0;)
        {
            std::string token = tok.nextToken();

            if (token == "}")
            {
                level--;
            }
            else if (token == "{")
            {
                level++;
            }
        }

        return;
    }

    // Valid particle declaration, go ahead parsing the name
    std::string name = tok.nextToken();
    tok.assertNextToken("{");

    ParticleDefPtr pdef = findOrInsertParticleDef(name);
    pdef->setFilename(filename);

    // Let the particle construct itself from the token stream
    pdef->parseFromTokens(tok);
}
Example 4: GlobalPatchCreator
/*
// Example Primitive
{
    patchDef2
    {
        "textures/darkmod/stone/brick/rough_big_blocks03"
        ( 5 3 0 0 0 )
        (
            ( ( 64 -88 108 0 0 ) ( 64 -88 184 0 -1.484375 ) ( 64 -88 184 0 -1.484375 ) )
            ( ( 64 -88 184 1.484375 0 ) ( 64 -88 184 1.484375 -1.484375 ) ( 64 -88 184 1.484375 -1.484375 ) )
            ( ( 112 -88 184 2.421875 0 ) ( 112 -88 184 2.421875 -1.484375 ) ( 112 -88 184 2.421875 -1.484375 ) )
            ( ( 160 -88 184 3.359375 0 ) ( 160 -88 184 3.359375 -1.484375 ) ( 160 -88 184 3.359375 -1.484375 ) )
            ( ( 160 -88 108 4.84375 0 ) ( 160 -88 184 4.84375 -1.484375 ) ( 160 -88 184 4.84375 -1.484375 ) )
        )
    }
}
*/
scene::INodePtr PatchDef2Parser::parse(parser::DefTokeniser& tok) const
{
    scene::INodePtr node = GlobalPatchCreator(DEF2).createPatch();

    IPatchNodePtr patchNode = boost::dynamic_pointer_cast<IPatchNode>(node);
    assert(patchNode != NULL);

    IPatch& patch = patchNode->getPatch();

    tok.assertNextToken("{");

    // Parse shader
    patch.setShader(tok.nextToken());

    // Parse parameters
    tok.assertNextToken("(");

    // parse matrix dimensions
    std::size_t cols = string::convert<std::size_t>(tok.nextToken());
    std::size_t rows = string::convert<std::size_t>(tok.nextToken());

    patch.setDims(cols, rows);

    // ignore contents/flags values
    tok.skipTokens(3);

    tok.assertNextToken(")");

    // Parse Patch Matrix
    parseMatrix(tok, patch);

    // Parse Footer
    tok.assertNextToken("}");
    tok.assertNextToken("}");

    patch.controlPointsChanged();

    return node;
}
Example 5: parseFromTokens
void ParticleParameter::parseFromTokens(parser::DefTokeniser& tok)
{
    std::string val = tok.nextToken();

    try
    {
        setFrom(boost::lexical_cast<float>(val));
    }
    catch (boost::bad_lexical_cast&)
    {
        rError() << "[particles] Bad lower value, token is '" <<
            val << "'" << std::endl;
    }

    if (tok.peek() == "to")
    {
        // Upper value is there, parse it
        tok.skipTokens(1); // skip the "to"

        val = tok.nextToken();

        try
        {
            // cut off the quotes before converting to double
            setTo(boost::lexical_cast<float>(val));
        }
        catch (boost::bad_lexical_cast&)
        {
            rError() << "[particles] Bad upper value, token is '" <<
                val << "'" << std::endl;
        }
    }
    else
    {
        setTo(getFrom());
    }
}
Example 6: getExpression
// Returns a GUI expression, which can be a number, a string or a formula ("gui::objVisible" == 1).
std::string GuiWindowDef::getExpression(parser::DefTokeniser& tokeniser)
{
    std::string returnValue = tokeniser.nextToken();

    if (returnValue == "(")
    {
        // Assemble token until closing brace found
        std::size_t depth = 1;

        while (depth > 0 && tokeniser.hasMoreTokens())
        {
            std::string token = tokeniser.nextToken();

            if (token == ")") depth--;

            returnValue += token;
        }
    }

    // Strip quotes
    boost::algorithm::trim_if(returnValue, boost::algorithm::is_any_of("\""));

    return returnValue;
}
Example 7: FailureException
void Doom3MapReader::parseMapVersion(parser::DefTokeniser& tok)
{
    // Parse the map version
    float version = 0;

    try
    {
        tok.assertNextToken("Version");
        version = boost::lexical_cast<float>(tok.nextToken());
    }
    catch (parser::ParseException& e)
    {
        // failed => quit
        rError()
            << "[mapdoom3] Unable to parse map version: "
            << e.what() << std::endl;

        throw FailureException(_("Unable to parse map version (parse exception)."));
    }
    catch (boost::bad_lexical_cast& e)
    {
        rError()
            << "[mapdoom3] Unable to parse map version: "
            << e.what() << std::endl;

        throw FailureException(_("Could not recognise map version number format."));
    }

    float requiredVersion = MAP_VERSION_D3;

    // Check we have the correct version for this module
    if (version != requiredVersion)
    {
        std::string errMsg = (boost::format(_("Incorrect map version: required %f, found %f")) % requiredVersion % version).str();

        rError() << errMsg << std::endl;

        throw FailureException(errMsg);
    }

    // success
}
Example 8: FailureException
void Quake3MapReader::parsePrimitive(parser::DefTokeniser& tok, const scene::INodePtr& parentEntity)
{
    _primitiveCount++;

    std::string primitiveKeyword = tok.nextToken();

    // Get a parser for this keyword
    PrimitiveParsers::const_iterator p = _primitiveParsers.find(primitiveKeyword);

    if (p == _primitiveParsers.end())
    {
        throw FailureException("Unknown primitive type: " + primitiveKeyword);
    }

    const PrimitiveParserPtr& parser = p->second;

    // Try to parse the primitive, throwing exception if failed
    try
    {
        scene::INodePtr primitive = parser->parse(tok);

        if (!primitive)
        {
            std::string text = (boost::format(_("Primitive #%d: parse error")) % _primitiveCount).str();
            throw FailureException(text);
        }

        // Now add the primitive as a child of the entity
        _importFilter.addPrimitiveToEntity(primitive, parentEntity);
    }
    catch (parser::ParseException& e)
    {
        // Translate ParseExceptions to FailureExceptions
        std::string text = (boost::format(_("Primitive #%d: parse exception %s")) % _primitiveCount % e.what()).str();
        throw FailureException(text);
    }
}
Example 9: if
scene::INodePtr BrushDef3Parser::parse(parser::DefTokeniser& tok) const
{
    // Create a new brush
    scene::INodePtr node = GlobalBrushCreator().createBrush();

    // Cast the node, this must succeed
    IBrushNodePtr brushNode = boost::dynamic_pointer_cast<IBrushNode>(node);
    assert(brushNode != NULL);

    IBrush& brush = brushNode->getIBrush();

    tok.assertNextToken("{");

    // Parse face tokens until a closing brace is encountered
    while (1)
    {
        std::string token = tok.nextToken();

        // Token should be either a "(" (start of face) or "}" (end of brush)
        if (token == "}")
        {
            break; // end of brush
        }
        else if (token == "(") // FACE
        {
            // Construct a plane and parse its values
            Plane3 plane;

            plane.normal().x() = string::to_float(tok.nextToken());
            plane.normal().y() = string::to_float(tok.nextToken());
            plane.normal().z() = string::to_float(tok.nextToken());
            plane.dist() = -string::to_float(tok.nextToken()); // negate d

            tok.assertNextToken(")");

            // Parse TexDef
            Matrix4 texdef;
            tok.assertNextToken("(");

            tok.assertNextToken("(");
            texdef.xx() = string::to_float(tok.nextToken());
            texdef.yx() = string::to_float(tok.nextToken());
            texdef.tx() = string::to_float(tok.nextToken());
            tok.assertNextToken(")");

            tok.assertNextToken("(");
            texdef.xy() = string::to_float(tok.nextToken());
            texdef.yy() = string::to_float(tok.nextToken());
            texdef.ty() = string::to_float(tok.nextToken());
            tok.assertNextToken(")");

            tok.assertNextToken(")");

            // Parse Shader
            std::string shader = tok.nextToken();

            // Parse Flags (usually each brush has all faces detail or all faces structural)
            IBrush::DetailFlag flag = static_cast<IBrush::DetailFlag>(
                string::convert<std::size_t>(tok.nextToken(), IBrush::Structural));
            brush.setDetailFlag(flag);

            // Ignore the other two flags
            tok.skipTokens(2);

            // Finally, add the new face to the brush
            /*IFace& face = */brush.addFace(plane, texdef, shader);
        }
        else
        {
            std::string text = (boost::format(_("BrushDef3Parser: invalid token '%s'")) % token).str();
            throw parser::ParseException(text);
        }
    }

    // Final outer "}"
    tok.assertNextToken("}");

    return node;
}
Example 10: if
void Quake3MapReader::parseEntity(parser::DefTokeniser& tok)
{
    // Map of keyvalues for this entity
    EntityKeyValues keyValues;

    // The actual entity. This is initially null, and will be created when
    // primitives start or the end of the entity is reached
    scene::INodePtr entity;

    // Start parsing, first token must be an open brace
    tok.assertNextToken("{");

    std::string token = tok.nextToken();

    // Reset the primitive counter, we're starting a new entity
    _primitiveCount = 0;

    while (true)
    {
        // Token must be either a key, a "{" to indicate the start of a
        // primitive, or a "}" to indicate the end of the entity
        if (token == "{") // PRIMITIVE
        {
            // Create the entity right now, if not yet done
            if (entity == NULL)
            {
                entity = createEntity(keyValues);
            }

            // Parse the primitive block, and pass the parent entity
            parsePrimitive(tok, entity);
        }
        else if (token == "}") // END OF ENTITY
        {
            // Create the entity if necessary and return it
            if (entity == NULL)
            {
                entity = createEntity(keyValues);
            }

            break;
        }
        else // KEY
        {
            std::string value = tok.nextToken();

            // Sanity check (invalid number of tokens will get us out of sync)
            if (value == "{" || value == "}")
            {
                std::string text = (boost::format(_("Parsed invalid value '%s' for key '%s'")) % value % token).str();
                throw FailureException(text);
            }

            // Otherwise add the keyvalue pair to our map
            keyValues.insert(EntityKeyValues::value_type(token, value));
        }

        // Get the next token
        token = tok.nextToken();
    }

    // Insert the entity
    _importFilter.addEntity(entity);
}
Example 11: if
void Doom3EntityClass::parseFromTokens(parser::DefTokeniser& tokeniser)
{
    // Clear this structure first, we might be "refreshing" ourselves from tokens
    clear();

    // Required open brace (the name has already been parsed by the EClassManager)
    tokeniser.assertNextToken("{");

    // Loop over all of the keys in this entitydef
    std::string key;
    while ((key = tokeniser.nextToken()) != "}")
    {
        const std::string value = tokeniser.nextToken();

        // Handle some keys specially
        if (key == "model")
        {
            setModelPath(os::standardPath(value));
        }
        else if (key == "editor_color")
        {
            setColour(string::convert<Vector3>(value));
        }
        else if (key == "editor_light")
        {
            setIsLight(value == "1");
        }
        else if (key == "spawnclass")
        {
            setIsLight(value == "idLight");
        }
        else if (boost::algorithm::istarts_with(key, "editor_"))
        {
            parseEditorSpawnarg(key, value);
        }

        // Try parsing this key/value with the Attachments manager
        _attachments->parseDefAttachKeys(key, value);

        // Add the EntityClassAttribute for this key/val
        if (getAttribute(key).getType().empty())
        {
            // Following key-specific processing, add the keyvalue to the eclass
            EntityClassAttribute attribute("text", key, value, "");

            // Type is empty, attribute does not exist, add it.
            addAttribute(attribute);
        }
        else if (getAttribute(key).getValue().empty())
        {
            // Attribute type is set, but value is empty, set the value.
            getAttribute(key).setValue(value);
        }
        else
        {
            // Both type and value are not empty, emit a warning
            rWarning() << "[eclassmgr] attribute " << key
                << " already set on entityclass " << _name << std::endl;
        }
    } // while true

    _attachments->validateAttachments();

    // Notify the observers
    _changedSignal.emit();
}
Example 12: parse
/*
// Example Primitive
{
    brushDef
    {
        ( -1216 -464 232 ) ( -1088 -464 232 ) ( -1088 -80 120 ) ( ( 0.031250 0 14 ) ( -0.000009 0.031250 4.471550 ) ) common/caulk 134217728 4 0
        ( -1088 -464 248 ) ( -1216 -464 248 ) ( -1216 -80 136 ) ( ( 0 -0.031373 -0.147059 ) ( 0.007812 0 0.049020 ) ) common/caulk 134217728 0 0
        ( -1088 -560 120 ) ( -1088 -560 136 ) ( -1088 -80 136 ) ( ( 0.031250 0 16.500000 ) ( 0 0.031250 0.250000 ) ) common/caulk 134217728 4 0
        ( -1088 -80 136 ) ( -1216 -80 136 ) ( -1216 -80 8 ) ( ( 0.031250 0 2 ) ( 0 0.031250 0.250000 ) ) common/caulk 134217728 4 0
        ( -1216 -400 136 ) ( -1216 -400 120 ) ( -1216 -80 120 ) ( ( 0.031250 0 -16.500000 ) ( 0 0.031250 0.250000 ) ) common/caulk 134217728 4 0
        ( -1088 -464 232 ) ( -1216 -464 232 ) ( -1216 -464 248 ) ( ( 0.031250 0 -2 ) ( 0 0.031250 0.250000 ) ) common/caulk 134217728 4 0
    }
}
*/
scene::INodePtr BrushDefParser::parse(parser::DefTokeniser& tok) const
{
    // Create a new brush
    scene::INodePtr node = GlobalBrushCreator().createBrush();

    // Cast the node, this must succeed
    IBrushNodePtr brushNode = boost::dynamic_pointer_cast<IBrushNode>(node);
    assert(brushNode != NULL);

    IBrush& brush = brushNode->getIBrush();

    tok.assertNextToken("{");

    // Parse face tokens until a closing brace is encountered
    while (1)
    {
        std::string token = tok.nextToken();

        // Token should be either a "(" (start of face) or "}" (end of brush)
        if (token == "}")
        {
            break; // end of brush
        }
        else if (token == "(") // FACE
        {
            // Parse three 3D points to construct a plane
            Vector3 p1(string::to_float(tok.nextToken()), string::to_float(tok.nextToken()), string::to_float(tok.nextToken()));
            tok.assertNextToken(")");

            tok.assertNextToken("(");
            Vector3 p2(string::to_float(tok.nextToken()), string::to_float(tok.nextToken()), string::to_float(tok.nextToken()));
            tok.assertNextToken(")");

            tok.assertNextToken("(");
            Vector3 p3(string::to_float(tok.nextToken()), string::to_float(tok.nextToken()), string::to_float(tok.nextToken()));
            tok.assertNextToken(")");

            // Construct the plane from the three points
            Plane3 plane(p1, p2, p3);

            // Parse TexDef
            Matrix4 texdef;
            tok.assertNextToken("(");

            tok.assertNextToken("(");
            texdef.xx() = string::to_float(tok.nextToken());
            texdef.yx() = string::to_float(tok.nextToken());
            texdef.tx() = string::to_float(tok.nextToken());
            tok.assertNextToken(")");

            tok.assertNextToken("(");
            texdef.xy() = string::to_float(tok.nextToken());
            texdef.yy() = string::to_float(tok.nextToken());
            texdef.ty() = string::to_float(tok.nextToken());
            tok.assertNextToken(")");

            tok.assertNextToken(")");

            // Parse Shader, brushDef has an implicit "textures/" not written to the map
            std::string shader = "textures/" + tok.nextToken();

            // Parse Contents Flags (and ignore them)
            tok.skipTokens(3);

            // Finally, add the new face to the brush
            /*IFace& face = */brush.addFace(plane, texdef, shader);
        }
        else
        {
            std::string text = (boost::format(_("BrushDefParser: invalid token '%s'")) % token).str();
            throw parser::ParseException(text);
        }
    }

    // Final outer "}"
    tok.assertNextToken("}");

    return node;
}
Example 13: constructFromTokens
void GuiWindowDef::constructFromTokens(parser::DefTokeniser& tokeniser)
{
    // The windowDef keyword has already been parsed, so expect a name plus an opening brace here
    name = tokeniser.nextToken();
    tokeniser.assertNextToken("{");

    while (tokeniser.hasMoreTokens())
    {
        std::string token = tokeniser.nextToken();
        boost::algorithm::to_lower(token);

        if (token == "rect")
        {
            rect = parseVector4(tokeniser);
        }
        else if (token == "visible")
        {
            visible = parseBool(tokeniser);
        }
        else if (token == "notime")
        {
            notime = parseBool(tokeniser);
        }
        else if (token == "forecolor")
        {
            forecolor = parseVector4(tokeniser);
        }
        else if (token == "backcolor")
        {
            backcolor = parseVector4(tokeniser);
        }
        else if (token == "bordercolor")
        {
            bordercolor = parseVector4(tokeniser);
        }
        else if (token == "matcolor")
        {
            matcolor = parseVector4(tokeniser);
        }
        else if (token == "rotate")
        {
            rotate = parseFloat(tokeniser);
        }
        else if (token == "text")
        {
            setText(parseString(tokeniser));
        }
        else if (token == "font")
        {
            font = parseString(tokeniser);

            // Cut off the "fonts/" part
            boost::algorithm::replace_first(font, "fonts/", "");
        }
        else if (token == "textscale")
        {
            textscale = parseFloat(tokeniser);
        }
        else if (token == "textalign")
        {
            textalign = parseInt(tokeniser);
        }
        else if (token == "textalignx")
        {
            textalignx = parseFloat(tokeniser);
        }
        else if (token == "textaligny")
        {
            textaligny = parseFloat(tokeniser);
        }
        else if (token == "forceaspectwidth")
        {
            forceaspectwidth = parseFloat(tokeniser);
        }
        else if (token == "forceaspectheight")
        {
            forceaspectheight = parseFloat(tokeniser);
        }
        else if (token == "background")
        {
            background = parseString(tokeniser);
        }
        else if (token == "noevents")
        {
            noevents = parseBool(tokeniser);
        }
        else if (token == "nocursor")
        {
            nocursor = parseBool(tokeniser);
        }
        else if (token == "noclip")
        {
            noclip = parseBool(tokeniser);
        }
        else if (token == "nowrap")
        {
            nowrap = parseBool(tokeniser);
        }
        else if (token == "modal")
//......... the remainder of this example is omitted in the source .........
Example 14:
void MD5Surface::parseFromTokens(parser::DefTokeniser& tok)
{
    // Start of datablock
    tok.assertNextToken("mesh");
    tok.assertNextToken("{");

    // Get the reference to the mesh definition
    MD5Mesh& mesh = *_mesh;

    // Get the shader name
    tok.assertNextToken("shader");
    setDefaultMaterial(tok.nextToken());

    // ----- VERTICES ------

    // Read the vertex count
    tok.assertNextToken("numverts");
    std::size_t numVerts = string::convert<std::size_t>(tok.nextToken());

    // Initialise the vertex vector
    MD5Verts& verts = mesh.vertices;
    verts.resize(numVerts);

    // Populate each vertex struct with parsed values
    for (MD5Verts::iterator vt = verts.begin(); vt != verts.end(); ++vt) {
        tok.assertNextToken("vert");

        // Index of vert
        vt->index = string::convert<std::size_t>(tok.nextToken());

        // U and V texcoords
        tok.assertNextToken("(");
        vt->u = string::convert<float>(tok.nextToken());
        vt->v = string::convert<float>(tok.nextToken());
        tok.assertNextToken(")");

        // Weight index and count
        vt->weight_index = string::convert<std::size_t>(tok.nextToken());
        vt->weight_count = string::convert<std::size_t>(tok.nextToken());
    } // for each vertex

    // ------ TRIANGLES ------

    // Read the number of triangles
    tok.assertNextToken("numtris");
    std::size_t numTris = string::convert<std::size_t>(tok.nextToken());

    // Initialise the triangle vector
    MD5Tris& tris = mesh.triangles;
    tris.resize(numTris);

    // Read each triangle
    for (MD5Tris::iterator tr = tris.begin(); tr != tris.end(); ++tr) {
        tok.assertNextToken("tri");

        // Triangle index, followed by the indexes of its 3 vertices
        tr->index = string::convert<std::size_t>(tok.nextToken());
        tr->a = string::convert<std::size_t>(tok.nextToken());
        tr->b = string::convert<std::size_t>(tok.nextToken());
        tr->c = string::convert<std::size_t>(tok.nextToken());
    } // for each triangle

    // ----- WEIGHTS ------

    // Read the number of weights
    tok.assertNextToken("numweights");
    std::size_t numWeights = string::convert<std::size_t>(tok.nextToken());

    // Initialise weights vector
    MD5Weights& weights = mesh.weights;
    weights.resize(numWeights);

    // Populate with weight data
    for (MD5Weights::iterator w = weights.begin(); w != weights.end(); ++w) {
        tok.assertNextToken("weight");

        // Index and joint
        w->index = string::convert<std::size_t>(tok.nextToken());
        w->joint = string::convert<std::size_t>(tok.nextToken());

        // Strength and relative position
        w->t = string::convert<float>(tok.nextToken());
        w->v = MD5Model::parseVector3(tok);
    } // for each weight

    // ----- END OF MESH DECL -----

    tok.assertNextToken("}");
}