This article collects typical usage examples of the C# JsonReader.JsonTokenizer class. If you have been wondering what JsonReader.JsonTokenizer is for, how to use it, or what working code looks like, the selected examples here may help.
The following shows 15 code examples of the JsonReader.JsonTokenizer class, sorted by popularity by default.
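Before the individual examples, here is a minimal orientation sketch of the pattern every example below follows: construct a JsonReader.JsonTokenizer, call GetTokens on a JSON string, and inspect the resulting sequence of model tokens. This is only a sketch based on the API usage visible in the examples themselves (JsonReader.JsonTokenizer, GetTokens, ModelGrammar), not additional library documentation.
// Minimal usage sketch, assuming the JsonFx-style API shown in the examples below.
var tokenizer = new JsonReader.JsonTokenizer();
var tokens = tokenizer.GetTokens(@"{""hello"":""world""}").ToArray();
// For this input the token sequence is:
// TokenObjectBeginUnnamed, TokenProperty("hello"), TokenPrimitive("world"), TokenObjectEnd
Assert.Equal(ModelGrammar.TokenObjectBeginUnnamed, tokens[0]);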
Example 1: ConvertJson2Bson_HelloWorld_RoundTripsJsonToBsonAndBack
public void ConvertJson2Bson_HelloWorld_RoundTripsJsonToBsonAndBack()
{
    // input from example at http://bsonspec.org/#/specification
    var inputText =
@"{
""hello"" : ""world""
}";
    var expectedBinary = Encoding.UTF8.GetBytes(
        "\x16\x00\x00\x00\x02hello\x00" +
        "\x06\x00\x00\x00world\x00\x00");
    var expectedText = @"{""hello"":""world""}";

    var jsonTokenizer = new JsonReader.JsonTokenizer();
    var tokens1 = jsonTokenizer.GetTokens(inputText);
    var bsonFormatter = new BsonWriter.BsonFormatter();
    var actualBinary = bsonFormatter.Format(tokens1);
    Assert.Equal(expectedBinary, actualBinary);

    var bsonTokenizer = new BsonReader.BsonTokenizer();
    var tokens2 = bsonTokenizer.GetTokens(actualBinary);
    var jsonFormatter = new JsonWriter.JsonFormatter(new DataWriterSettings { PrettyPrint = false });
    var actualText = jsonFormatter.Format(tokens2);
    Assert.Equal(expectedText, actualText);
}
Example 2: ConvertJson2Xml_HelloWorld_RoundTripsJsonToXmlAndBack
public void ConvertJson2Xml_HelloWorld_RoundTripsJsonToXmlAndBack()
{
    // input JSON is the same hello/world example used in Example 1 (http://bsonspec.org/#/specification)
    var inputJson =
@"{
""hello"" : ""world""
}";
    var expectedXml = @"<object><hello>world</hello></object>";

    var jsonTokenizer = new JsonReader.JsonTokenizer();
    var tokens1 = jsonTokenizer.GetTokens(inputJson);
    var writerSettings = new DataWriterSettings { PrettyPrint = false };
    var xmlFormatter = new TransformFormatter<ModelTokenType, MarkupTokenType>(
        new XmlWriter.XmlFormatter(writerSettings),
        new XmlWriter.XmlOutTransformer(writerSettings));
    var actualXml = xmlFormatter.Format(tokens1);
    Assert.Equal(expectedXml, actualXml);

    var expectedJson = @"{""hello"":""world""}";
    var readerSettings = new DataReaderSettings(writerSettings.Resolver);
    var xmlTokenizer = new TransformTokenizer<MarkupTokenType, ModelTokenType>(
        new XmlReader.XmlTokenizer(),
        new XmlReader.XmlInTransformer(readerSettings));
    var tokens2 = xmlTokenizer.GetTokens(actualXml);
    var jsonFormatter = new JsonWriter.JsonFormatter(new DataWriterSettings { PrettyPrint = false });
    var actualJson = jsonFormatter.Format(tokens2);
    Assert.Equal(expectedJson, actualJson);
}
Example 3: GetTokens_ArrayEmpty_ReturnsEmptyArrayTokens
public void GetTokens_ArrayEmpty_ReturnsEmptyArrayTokens()
{
    const string input = "[]";
    var expected = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayEnd
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
Example 4: GetTokens_NumberNegNoLeadingDigitFloat_ReturnsNumberToken
public void GetTokens_NumberNegNoLeadingDigitFloat_ReturnsNumberToken()
{
    const string input = "-.123456";
    var expected = new[]
    {
        ModelGrammar.TokenPrimitive(-.123456)
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
Example 5: GetTokens_NumberIntegerLeadingZero_ReturnsObjectTokensWithNumberValue
public void GetTokens_NumberIntegerLeadingZero_ReturnsObjectTokensWithNumberValue()
{
    // input from fail13.json in test suite at http://www.json.org/JSON_checker/
    const string input = @"{""Numbers cannot have leading zeroes"": 013}";
    var expected = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("Numbers cannot have leading zeroes"),
        ModelGrammar.TokenPrimitive(13),
        ModelGrammar.TokenObjectEnd
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    // this is not allowed according to strict JSON, but we're following Postel's Law
    Assert.Equal(expected, actual);
}
Example 6: GetTokens_ObjectUnterminated_ProducesInvalidSequence
public void GetTokens_ObjectUnterminated_ProducesInvalidSequence()
{
    // NOTE: the analyzer must flag this as an error, since it is a grammar error rather than a tokenization error
    // input from fail32.json in test suite at http://www.json.org/JSON_checker/
    var input = @"{""Comma instead of closing brace"": true,";
    var expected = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("Comma instead of closing brace"),
        ModelGrammar.TokenTrue
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
Example 7: GetTokens_ObjectCommaInsteadOfColon_ProducesInvalidSequence
public void GetTokens_ObjectCommaInsteadOfColon_ProducesInvalidSequence()
{
    // input from fail21.json in test suite at http://www.json.org/JSON_checker/
    var input = @"{""Comma instead of colon"", null}";
    var expected = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenPrimitive("Comma instead of colon"),
        ModelGrammar.TokenNull,
        ModelGrammar.TokenObjectEnd
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
Example 8: GetTokens_LiteralNonQuotedKey_ReturnsObjectTokensWithLiteralKey
public void GetTokens_LiteralNonQuotedKey_ReturnsObjectTokensWithLiteralKey()
{
    // input from fail3.json in test suite at http://www.json.org/JSON_checker/
    const string input = @"{unquoted_key: ""keys must be quoted""}";
    var expected = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("unquoted_key"),
        ModelGrammar.TokenPrimitive("keys must be quoted"),
        ModelGrammar.TokenObjectEnd
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
Example 9: GetTokens_StringSimple_ReturnsStringToken
public void GetTokens_StringSimple_ReturnsStringToken()
{
    // input from fail1.json in test suite at http://www.json.org/JSON_checker/
    const string input = @"""A JSON payload should be an object or array, not a string.""";
    var expected = new[]
    {
        ModelGrammar.TokenPrimitive("A JSON payload should be an object or array, not a string.")
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    // this is not allowed according to strict JSON, but we're following Postel's Law
    Assert.Equal(expected, actual);
}
Example 10: GetTokens_StringUnescapedSingleQuote_ReturnsStringToken
public void GetTokens_StringUnescapedSingleQuote_ReturnsStringToken()
{
    const string input = @"""unescaped ' single quote""";
    var expected = new[]
    {
        ModelGrammar.TokenPrimitive("unescaped ' single quote")
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
Example 11: GetTokens_NumberFloatMissingFractional_ThrowsDeserializationException
public void GetTokens_NumberFloatMissingFractional_ThrowsDeserializationException()
{
    const string input = @"123.e5";

    var tokenizer = new JsonReader.JsonTokenizer();
    DeserializationException ex = Assert.Throws<DeserializationException>(
        delegate
        {
            var actual = tokenizer.GetTokens(input).ToArray();
        });

    // verify exception is coming from expected index
    Assert.Equal(0, ex.Index);
}
Example 12: GetTokens_StringUnrecognizedEscapeNull_CharIgnored
public void GetTokens_StringUnrecognizedEscapeNull_CharIgnored()
{
    // input from fail17.json in test suite at http://www.json.org/JSON_checker/
    const string input = @"[""Illegal backslash escape: \017""]";
    var expected = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("Illegal backslash escape: 17"),
        ModelGrammar.TokenArrayEnd
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    // this is not allowed according to strict JSON, but we're following Postel's Law
    Assert.Equal(expected, actual);
}
Example 13: GetTokens_StringSingleQuote_ReturnsStringToken
public void GetTokens_StringSingleQuote_ReturnsStringToken()
{
    // input from fail24.json in test suite at http://www.json.org/JSON_checker/
    const string input = @"['single quote']";
    var expected = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("single quote"),
        ModelGrammar.TokenArrayEnd
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    // this is not allowed according to strict JSON, but we're following Postel's Law
    Assert.Equal(expected, actual);
}
Example 14: GetTokens_StringImproperlyEscapedChars_ReturnsStringTokenWithSimpleChars
public void GetTokens_StringImproperlyEscapedChars_ReturnsStringTokenWithSimpleChars()
{
    const string input = @"""\u\u1\u12\u123\u12345""";
    var expected = new[]
    {
        ModelGrammar.TokenPrimitive("uu1u12u123\u12345")
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
Example 15: GetTokens_HighControl_Accepted
public void GetTokens_HighControl_Accepted()
{
    const string input = "\"\u0082\"";
    var expected = new[]
    {
        ModelGrammar.TokenPrimitive("\u0082")
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}