

C++ Tokeniser::nextLine Method Code Examples

This article collects typical usage examples of the C++ Tokeniser::nextLine method. If you are wondering how Tokeniser::nextLine is used in practice, or what real-world calls to it look like, the hand-picked examples below should help. You can also explore further usage examples of the Tokeniser class that this method belongs to.


Five code examples of the Tokeniser::nextLine method are shown below, sorted by popularity by default.
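The pattern shared by all five examples is the same: read the tokens of the current statement with getToken() (or the parse helpers built on it), then call nextLine() to move the tokeniser to the start of the next line before parsing further. Below is a minimal, self-contained sketch of that pattern. Note that SimpleTokeniser is a hypothetical stand-in written only for illustration; it is not the actual GtkRadiant/NetRadiant Tokeniser interface used in the examples.

// Minimal sketch of the getToken()/nextLine() pattern shared by the examples below.
// NOTE: SimpleTokeniser is a hypothetical stand-in, not the real GtkRadiant/NetRadiant
// Tokeniser. It only illustrates the idea that getToken() yields whitespace-separated
// tokens from the current line and nextLine() advances the cursor to the following line.
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

class SimpleTokeniser {
public:
	explicit SimpleTokeniser( const std::string& text ){
		std::istringstream in( text );
		std::string line;
		while ( std::getline( in, line ) )
			m_lines.push_back( line );
		loadLine();
	}

	// Fetches the next token on the current line; returns false when the line is exhausted.
	bool getToken( std::string& token ){
		return static_cast<bool>( m_tokens >> token );
	}

	// Advances to the next input line; subsequent getToken() calls read from it.
	void nextLine(){
		++m_current;
		loadLine();
	}

	bool eof() const {
		return m_current >= m_lines.size();
	}

private:
	void loadLine(){
		m_tokens.clear(); // reset eof/fail flags left over from the previous line
		m_tokens.str( m_current < m_lines.size() ? m_lines[m_current] : std::string() );
	}

	std::vector<std::string> m_lines;
	std::size_t m_current = 0;
	std::istringstream m_tokens;
};

int main(){
	SimpleTokeniser tokeniser( "classname worldspawn\n{\norigin 0 0 0\n}\n" );
	while ( !tokeniser.eof() )
	{
		std::string token;
		while ( tokeniser.getToken( token ) )
			std::cout << "[" << token << "] ";
		std::cout << "\n";
		tokeniser.nextLine(); // move on to the next line, as the examples do after each statement
	}
	return 0;
}

In the real examples the call serves the same purpose: once a complete statement has been consumed, the tokeniser is moved to the start of the next line before the next statement is parsed.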

Example 1: EntityClassDoom3_parseUnknown

// Skips over an unrecognised "{ ... }" block, tracking nested braces so the
// block's contents are consumed and discarded.
bool EntityClassDoom3_parseUnknown( Tokeniser& tokeniser ){
	//const char* name =
	PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser ) );

	//globalOutputStream() << "parsing unknown block " << makeQuoted(name) << "\n";

	PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser, "{" ) );
	tokeniser.nextLine();

	std::size_t depth = 1;
	for (;; )
	{
		const char* token;
		PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, token ) );
		if ( string_equal( token, "}" ) ) {
			if ( --depth == 0 ) {
				tokeniser.nextLine();
				break;
			}
		}
		else if ( string_equal( token, "{" ) ) {
			++depth;
		}
		tokeniser.nextLine();
	}
	return true;
}
Developer ID: xonotic, Project: netradient, Lines of code: 27, Source file: eclass_doom3.cpp

Example 2: Map_Read

// Reads every entity in the map token stream and inserts it under the root node.
void Map_Read(scene::Node& root, Tokeniser& tokeniser, EntityCreator& entityTable, const PrimitiveParser& parser)
{
  int count_entities = 0;
  for(;;)
  {
    tokeniser.nextLine();
    if (!tokeniser.getToken()) // { or 0
      break;

    NodeSmartReference entity(Entity_parseTokens(tokeniser, entityTable, parser, count_entities));

    if(entity == g_nullNode)
    {
      globalErrorStream() << "entity " << count_entities << ": parse error\n";
      return;
    }

    Node_getTraversable(root)->insert(entity);

    ++count_entities;
  }
}
Developer ID: ChunHungLiu, Project: GtkRadiant, Lines of code: 22, Source file: parse.cpp

Example 3: EntityClass_parse

// Parses a single Doom 3 entityDef block, filling in the EntityClass fields
// (name, model, editor_color, editor_mins/maxs, usage text, and so on).
static bool EntityClass_parse( EntityClass& entityClass, Tokeniser& tokeniser ){
	PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, entityClass.m_name ) );

	PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser, "{" ) );
	tokeniser.nextLine();

	StringOutputStream usage( 256 );
	StringOutputStream description( 256 );
	CopiedString* currentDescription = 0;
	StringOutputStream* currentString = 0;

	for (;; )
	{
		const char* key;
		PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, key ) );

		const char* last = string_findFirstSpaceOrTab( key );
		CopiedString first( StringRange( key, last ) );

		if ( !string_empty( last ) ) {
			last = string_findFirstNonSpaceOrTab( last );
		}

		if ( currentString != 0 && string_equal( key, "\\" ) ) {
			tokeniser.nextLine();
			*currentString << " ";
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, *currentString ) );
			continue;
		}

		if ( currentDescription != 0 ) {
			*currentDescription = description.c_str();
			description.clear();
			currentDescription = 0;
		}
		currentString = 0;

		if ( string_equal( key, "}" ) ) {
			tokeniser.nextLine();
			break;
		}
		else if ( string_equal( key, "model" ) ) {
			const char* token;
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, token ) );
			entityClass.fixedsize = true;
			StringOutputStream buffer( 256 );
			buffer << PathCleaned( token );
			entityClass.m_modelpath = buffer.c_str();
		}
		else if ( string_equal( key, "editor_color" ) ) {
			const char* value;
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, value ) );
			if ( !string_empty( value ) ) {
				entityClass.colorSpecified = true;
				bool success = string_parse_vector3( value, entityClass.color );
				ASSERT_MESSAGE( success, "editor_color: parse error" );
			}
		}
		else if ( string_equal( key, "editor_ragdoll" ) ) {
			//bool ragdoll = atoi(tokeniser.getToken()) != 0;
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser ) );
		}
		else if ( string_equal( key, "editor_mins" ) ) {
			entityClass.sizeSpecified = true;
			const char* value;
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, value ) );
			if ( !string_empty( value ) && !string_equal( value, "?" ) ) {
				entityClass.fixedsize = true;
				bool success = string_parse_vector3( value, entityClass.mins );
				ASSERT_MESSAGE( success, "editor_mins: parse error" );
			}
		}
		else if ( string_equal( key, "editor_maxs" ) ) {
			entityClass.sizeSpecified = true;
			const char* value;
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, value ) );
			if ( !string_empty( value ) && !string_equal( value, "?" ) ) {
				entityClass.fixedsize = true;
				bool success = string_parse_vector3( value, entityClass.maxs );
				ASSERT_MESSAGE( success, "editor_maxs: parse error" );
			}
		}
		else if ( string_equal( key, "editor_usage" ) ) {
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, usage ) );
			currentString = &usage;
		}
		else if ( string_equal_n( key, "editor_usage", 12 ) ) {
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, usage ) );
			currentString = &usage;
		}
		else if ( string_equal( key, "editor_rotatable" )
				  || string_equal( key, "editor_showangle" )
				  || string_equal( key, "editor_showangles" ) // typo? in prey movables.def
				  || string_equal( key, "editor_mover" )
				  || string_equal( key, "editor_model" )
				  || string_equal( key, "editor_material" )
				  || string_equal( key, "editor_combatnode" )
				  || ( !string_empty( last ) && string_equal( first.c_str(), "editor_gui" ) )
				  || string_equal_n( key, "editor_copy", 11 ) ) {
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser ) );
//......... remaining code omitted here .........
Developer ID: xonotic, Project: netradient, Lines of code: 101, Source file: eclass_doom3.cpp

Example 4: EntityClassDoom3_parseModel

// Parses a Doom 3 "model" declaration block (inherit, mesh, skin, offset,
// channel, anim) into the global g_models table.
bool EntityClassDoom3_parseModel( Tokeniser& tokeniser ){
	const char* name;
	PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, name ) );

	Model& model = g_models[name];

	PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser, "{" ) );
	tokeniser.nextLine();

	for (;; )
	{
		const char* parameter;
		PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, parameter ) );
		if ( string_equal( parameter, "}" ) ) {
			tokeniser.nextLine();
			break;
		}
		else if ( string_equal( parameter, "inherit" ) ) {
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, model.m_parent ) );
			tokeniser.nextLine();
		}
		else if ( string_equal( parameter, "remove" ) ) {
			//const char* remove =
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser ) );
			tokeniser.nextLine();
		}
		else if ( string_equal( parameter, "mesh" ) ) {
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, model.m_mesh ) );
			tokeniser.nextLine();
		}
		else if ( string_equal( parameter, "skin" ) ) {
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, model.m_skin ) );
			tokeniser.nextLine();
		}
		else if ( string_equal( parameter, "offset" ) ) {
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser, "(" ) );
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser ) );
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser ) );
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser ) );
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser, ")" ) );
			tokeniser.nextLine();
		}
		else if ( string_equal( parameter, "channel" ) ) {
			//const char* channelName =
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser ) );
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser, "(" ) );
			for (;; )
			{
				const char* end;
				PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, end ) );
				if ( string_equal( end, ")" ) ) {
					tokeniser.nextLine();
					break;
				}
			}
		}
		else if ( string_equal( parameter, "anim" ) ) {
			CopiedString animName;
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, animName ) );
			const char* animFile;
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, animFile ) );
			model.m_anims.insert( Model::Anims::value_type( animName, animFile ) );

			const char* token;
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, token ) );

			while ( string_equal( token, "," ) )
			{
				PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, animFile ) );
				PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, token ) );
			}

			if ( string_equal( token, "{" ) ) {
				for (;; )
				{
					const char* end;
					PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, end ) );
					if ( string_equal( end, "}" ) ) {
						tokeniser.nextLine();
						break;
					}
					tokeniser.nextLine();
				}
			}
			else
			{
				tokeniser.ungetToken();
			}
		}
		else
		{
			globalErrorStream() << "unknown model parameter: " << makeQuoted( parameter ) << "\n";
			return false;
		}
		tokeniser.nextLine();
	}
	return true;
}
Developer ID: xonotic, Project: netradient, Lines of code: 98, Source file: eclass_doom3.cpp

Example 5: Entity_parseTokens

// Parses a single map entity: its key/value pairs and any brush primitives it contains.
NodeSmartReference Entity_parseTokens(Tokeniser& tokeniser, EntityCreator& entityTable, const PrimitiveParser& parser, int index)
{
  NodeSmartReference entity(g_nullNode);
  KeyValues keyValues;
  const char* classname = "";

  int count_primitives = 0;
  while(1)
  {
    tokeniser.nextLine();
    const char* token = tokeniser.getToken();
    if(token == 0)
    {
      Tokeniser_unexpectedError(tokeniser, token, "#entity-token");
      return g_nullNode;
    }
    if (!strcmp(token, "}")) // end entity
    {
      if(entity == g_nullNode)
      {
        // entity does not have brushes
        entity = Entity_create(entityTable, GlobalEntityClassManager().findOrInsert(classname, false), keyValues);
      }
      return entity;
    }
    else if(!strcmp(token, "{")) // begin primitive
    {
      if(entity == g_nullNode)
      {
        // entity has brushes
        entity = Entity_create(entityTable, GlobalEntityClassManager().findOrInsert(classname, true), keyValues);
      }

      tokeniser.nextLine();

      NodeSmartReference primitive(parser.parsePrimitive(tokeniser));
      if(primitive == g_nullNode || !Node_getMapImporter(primitive)->importTokens(tokeniser))
      {
        globalErrorStream() << "brush " << count_primitives << ": parse error\n";
        return g_nullNode;
      }

      scene::Traversable* traversable = Node_getTraversable(entity);
      if(Node_getEntity(entity)->isContainer() && traversable != 0)
      {
        traversable->insert(primitive);
      }
      else
      {
        globalErrorStream() << "entity " << index << ": type " << classname << ": discarding brush " << count_primitives << "\n";
      }
      ++count_primitives;
    }
    else // epair
    {
      CopiedString key(token);
      token = tokeniser.getToken();
      if(token == 0)
      {
        Tokeniser_unexpectedError(tokeniser, token, "#epair-value");
        return g_nullNode;
      }
      keyValues.push_back(KeyValues::value_type(key, token));
      if(string_equal(key.c_str(), "classname"))
      {
        classname = keyValues.back().second.c_str();
      }
    }
  }
  // unreachable code
  return g_nullNode;
}
Developer ID: ChunHungLiu, Project: GtkRadiant, Lines of code: 72, Source file: parse.cpp


Note: The Tokeniser::nextLine examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors, and redistribution and use must follow the corresponding project's license. Please do not republish without permission.