This page collects typical usage examples of the C# MySql.Data.MySqlClient.MySqlTokenizer class. If you have been wondering what MySqlTokenizer is for, how to use it, or where to find working examples, the curated class samples below should help.
The MySqlTokenizer class belongs to the MySql.Data.MySqlClient namespace. Fifteen code examples are shown below, ordered by popularity by default. You can vote for the ones you like or find useful; your feedback helps surface better C# examples.
Example 1: AdjustDelimiterEnd
private void AdjustDelimiterEnd(MySqlTokenizer tokenizer)
{
if (tokenizer.StopIndex < query.Length)
{
int pos = tokenizer.StopIndex;
char c = query[pos];
while (!Char.IsWhiteSpace(c) && pos < (query.Length - 1))
{
c = query[++pos];
}
tokenizer.StopIndex = pos;
tokenizer.Position = pos;
}
}
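The adjustment above simply extends the tokenizer's StopIndex and Position to the end of the current run of non-whitespace characters, so a multi-character delimiter such as "$$" is consumed as a whole. A minimal self-contained sketch of the same scan on a plain string (no MySqlTokenizer involved, shown only to illustrate the loop):

static int AdjustEnd(string query, int pos)
{
    // advance until whitespace or the last character of the string
    char c = query[pos];
    while (!Char.IsWhiteSpace(c) && pos < query.Length - 1)
        c = query[++pos];
    return pos;
}

// AdjustEnd("DELIMITER $$\nSELECT 1$$", 10) returns 12, the newline
// following the full "$$" delimiter rather than the first '$' alone.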
Example 2: PrepareCommandText
/// <summary>
/// Prepares CommandText for use with the Prepare method
/// </summary>
/// <returns>Command text stripped of all parameter names</returns>
/// <remarks>
/// Takes the output of TokenizeSql and creates a single string of SQL
/// that only contains '?' markers for each parameter. It also creates
/// the parameterMap list that includes all the parameter names in the
/// order they appeared in the SQL
/// </remarks>
private List<string> PrepareCommandText(out string stripped_sql)
{
StringBuilder newSQL = new StringBuilder();
List<string> parameterMap = new List<string>();
int startPos = 0;
string sql = ResolvedCommandText;
MySqlTokenizer tokenizer = new MySqlTokenizer(sql);
string parameter = tokenizer.NextParameter();
while (parameter != null)
{
if (parameter.IndexOf(StoredProcedure.ParameterPrefix) == -1)
{
newSQL.Append(sql.Substring(startPos, tokenizer.StartIndex - startPos));
newSQL.Append("?");
parameterMap.Add(parameter);
startPos = tokenizer.StopIndex;
}
parameter = tokenizer.NextParameter();
}
newSQL.Append(sql.Substring(startPos));
stripped_sql = newSQL.ToString();
return parameterMap;
}
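For a quick feel of the transformation, here is a simplified, hypothetical stand-in that uses a regular expression instead of MySqlTokenizer; unlike the real tokenizer it does not skip parameters that appear inside string literals or comments:

using System.Collections.Generic;
using System.Text.RegularExpressions;

static string Strip(string sql, List<string> parameterMap)
{
    // replace every @name / ?name marker with '?' and record the name
    return Regex.Replace(sql, @"[@?][A-Za-z0-9_.$]+", m =>
    {
        parameterMap.Add(m.Value);
        return "?";
    });
}

// var map = new List<string>();
// Strip("SELECT * FROM t WHERE id=@id AND name=@name", map)
//   -> "SELECT * FROM t WHERE id=? AND name=?", map = { "@id", "@name" }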
Example 3: GetPossibleValues
private static List<string> GetPossibleValues(DataRow row)
{
var types = new string[] { "ENUM", "SET" };
var dtdIdentifier = row["DTD_IDENTIFIER"].ToString().Trim();
var index = 0;
for (; index < 2; index++)
if (dtdIdentifier.StartsWith(types[index], StringComparison.OrdinalIgnoreCase))
break;
if (index == 2) return null;
dtdIdentifier = dtdIdentifier.Substring(types[index].Length).Trim();
dtdIdentifier = dtdIdentifier.Trim('(', ')').Trim();
var values = new List<string>();
var tokenizer = new MySqlTokenizer(dtdIdentifier);
var token = tokenizer.NextToken();
var start = tokenizer.StartIndex;
while (true)
{
if (token == null || token == ",")
{
var end = dtdIdentifier.Length - 1;
if (token == ",")
end = tokenizer.StartIndex;
var value = dtdIdentifier.Substring(start, end - start).Trim('\'', '\"').Trim();
values.Add(value);
start = tokenizer.StopIndex;
}
if (token == null) break;
token = tokenizer.NextToken();
}
return values;
}
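The method yields the literal members of an ENUM or SET column and returns null for anything else; for example, a DTD_IDENTIFIER of "enum('small','medium','large')" becomes { "small", "medium", "large" }. A hedged, regex-based sketch of the same extraction, shown only to illustrate the expected output:

using System.Collections.Generic;
using System.Text.RegularExpressions;

static List<string> PossibleValues(string dtdIdentifier)
{
    var m = Regex.Match(dtdIdentifier, @"^\s*(?:enum|set)\s*\((.*)\)\s*$",
                        RegexOptions.IgnoreCase | RegexOptions.Singleline);
    if (!m.Success)
        return null;                       // not an ENUM/SET column
    var values = new List<string>();
    foreach (Match v in Regex.Matches(m.Groups[1].Value, @"'([^']*)'"))
        values.Add(v.Groups[1].Value);
    return values;
}

// PossibleValues("enum('small','medium','large')")
//   -> { "small", "medium", "large" }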
Example 4: ParseDataType
/// <summary>
/// Parses out the elements of a procedure parameter data type.
/// </summary>
private string ParseDataType(DataRow row, MySqlTokenizer tokenizer)
{
var dtd = new StringBuilder(
tokenizer.NextToken().ToUpper(CultureInfo.InvariantCulture));
row["DATA_TYPE"] = dtd.ToString();
var type = row["DATA_TYPE"].ToString();
var token = tokenizer.NextToken();
if (token == "(")
{
token = tokenizer.ReadParenthesis();
dtd.AppendFormat(CultureInfo.InvariantCulture, "{0}", token);
if (type != "ENUM" && type != "SET")
ParseDataTypeSize(row, token);
token = tokenizer.NextToken();
}
else
dtd.Append(GetDataTypeDefaults(type, row));
while (token != ")" &&
token != "," &&
String.Compare(token, "begin", true) != 0 &&
String.Compare(token, "return", true) != 0)
{
if (String.Compare(token, "CHARACTER", true) == 0 ||
String.Compare(token, "BINARY", true) == 0)
{ } // we don't need to do anything with this
else if (String.Compare(token, "SET", true) == 0 ||
String.Compare(token, "CHARSET", true) == 0)
row["CHARACTER_SET_NAME"] = tokenizer.NextToken();
else if (String.Compare(token, "ASCII", true) == 0)
row["CHARACTER_SET_NAME"] = "latin1";
else if (String.Compare(token, "UNICODE", true) == 0)
row["CHARACTER_SET_NAME"] = "ucs2";
else if (String.Compare(token, "COLLATE", true) == 0)
row["COLLATION_NAME"] = tokenizer.NextToken();
else
dtd.AppendFormat(CultureInfo.InvariantCulture, " {0}", token);
token = tokenizer.NextToken();
}
if (dtd.Length > 0)
row["DTD_IDENTIFIER"] = dtd.ToString();
// now default the collation if one wasn't given
if (row["COLLATION_NAME"].ToString().Length == 0 &&
row["CHARACTER_SET_NAME"].ToString().Length > 0)
row["COLLATION_NAME"] = CharSetMap.GetDefaultCollation(
row["CHARACTER_SET_NAME"].ToString(), connection);
// now set the octet length
if (row["CHARACTER_MAXIMUM_LENGTH"] != DBNull.Value)
row["CHARACTER_OCTET_LENGTH"] =
CharSetMap.GetMaxLength(row["CHARACTER_SET_NAME"].ToString(), connection) *
(int)row["CHARACTER_MAXIMUM_LENGTH"];
return token;
}
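The values filled in here surface through the public schema API, which is the easiest way to see the parser's output. A hedged usage sketch: the connection string is a placeholder, and "Procedure Parameters" is the collection name used by the connector's schema provider (verify against your connector version):

using System;
using System.Data;
using MySql.Data.MySqlClient;

using (var conn = new MySqlConnection("server=localhost;uid=user;pwd=pass;database=test"))
{
    conn.Open();
    DataTable parms = conn.GetSchema("Procedure Parameters");
    foreach (DataRow r in parms.Rows)
        Console.WriteLine("{0} {1} {2} ({3})",
            r["PARAMETER_MODE"], r["PARAMETER_NAME"],
            r["DATA_TYPE"], r["DTD_IDENTIFIER"]);
}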
Example 5: InternalBindParameters
private void InternalBindParameters(string sql, MySqlParameterCollection parameters,
MySqlPacket packet)
{
bool sqlServerMode = command.Connection.Settings.SqlServerMode;
if (packet == null)
{
packet = new MySqlPacket(Driver.Encoding);
packet.Version = Driver.Version;
packet.WriteByte(0);
}
MySqlTokenizer tokenizer = new MySqlTokenizer(sql);
tokenizer.ReturnComments = true;
tokenizer.SqlServerMode = sqlServerMode;
int pos = 0;
string token = tokenizer.NextToken();
while (token != null)
{
// serialize everything that came before the token (i.e. whitespace)
packet.WriteStringNoNull(sql.Substring(pos, tokenizer.StartIndex - pos));
pos = tokenizer.StopIndex;
if (MySqlTokenizer.IsParameter(token))
{
if (SerializeParameter(parameters, packet, token))
token = null;
}
if (token != null)
{
if (sqlServerMode && tokenizer.Quoted && token.StartsWith("["))
token = String.Format("`{0}`", token.Substring(1, token.Length - 2));
packet.WriteStringNoNull(token);
}
token = tokenizer.NextToken();
}
buffers.Add(packet);
}
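From the caller's side, this binder is what lets named parameters in CommandText be matched by name and written into the outgoing packet in the order they occur in the SQL. A hedged usage sketch; conn is assumed to be an open MySqlConnection and the table and column names are placeholders:

using (var cmd = new MySqlCommand(
    "UPDATE books SET price = @price WHERE id = @id", conn))
{
    cmd.Parameters.AddWithValue("@price", 19.99m);
    cmd.Parameters.AddWithValue("@id", 42);
    cmd.ExecuteNonQuery();   // the binder substitutes @price, then @id
}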
Example 6: ParseColumns
private static List<string> ParseColumns(MySqlTokenizer tokenizer)
{
List<string> sc = new List<string>();
string token = tokenizer.NextToken();
while (token != ")")
{
if (token != ",")
sc.Add(token);
token = tokenizer.NextToken();
}
return sc;
}
Example 7: ParseProcedureBody
private void ParseProcedureBody(DataTable parametersTable, string body,
DataRow row, string nameToRestrict)
{
var modes = new ArrayList(new string[3] { "IN", "OUT", "INOUT" });
var sqlMode = row["SQL_MODE"].ToString();
var pos = 1;
var tokenizer = new MySqlTokenizer(body);
tokenizer.AnsiQuotes = sqlMode.IndexOf("ANSI_QUOTES") != -1;
tokenizer.BackslashEscapes = sqlMode.IndexOf("NO_BACKSLASH_ESCAPES") == -1;
tokenizer.ReturnComments = false;
var token = tokenizer.NextToken();
// this block will scan for the opening paren while also determining
// if this routine is a function. If so, then we need to add a
// parameter row for the return parameter since it is ordinal position
// 0 and should appear first.
while (token != "(")
{
if (String.Compare(token, "FUNCTION", true) == 0 &&
nameToRestrict == null)
{
parametersTable.Rows.Add(parametersTable.NewRow());
InitParameterRow(row, parametersTable.Rows[0]);
}
token = tokenizer.NextToken();
}
token = tokenizer.NextToken(); // now move to the next token past the (
while (token != ")")
{
var parmRow = parametersTable.NewRow();
InitParameterRow(row, parmRow);
parmRow["ORDINAL_POSITION"] = pos++;
// handle mode and name for the parameter
var mode = token.ToUpper(CultureInfo.InvariantCulture);
if (!tokenizer.Quoted && modes.Contains(mode))
{
parmRow["PARAMETER_MODE"] = mode;
token = tokenizer.NextToken();
}
if (tokenizer.Quoted)
token = token.Substring(1, token.Length - 2);
parmRow["PARAMETER_NAME"] = token;
// now parse data type
token = ParseDataType(parmRow, tokenizer);
if (token == ",")
token = tokenizer.NextToken();
// now determine if we should include this row after all
// we need to parse it before this check so we are correctly
// positioned for the next parameter
if (nameToRestrict == null ||
String.Compare(parmRow["PARAMETER_NAME"].ToString(), nameToRestrict, true) == 0)
parametersTable.Rows.Add(parmRow);
}
// now parse out the return parameter if there is one.
token = tokenizer.NextToken().ToUpper(CultureInfo.InvariantCulture);
if (String.Compare(token, "RETURNS", true) == 0)
{
var parameterRow = parametersTable.Rows[0];
parameterRow["PARAMETER_NAME"] = "RETURN_VALUE";
ParseDataType(parameterRow, tokenizer);
}
}
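A hedged walkthrough of what the parser extracts from a typical routine body; the routine itself is invented for illustration:

// body passed to ParseProcedureBody for a function such as:
string body =
    "CREATE FUNCTION add_tax(price DECIMAL(10,2), rate DECIMAL(4,2)) " +
    "RETURNS DECIMAL(10,2) RETURN price * (1 + rate)";

// Expected rows, roughly:
//   row 0: PARAMETER_NAME = "RETURN_VALUE", DATA_TYPE = "DECIMAL"
//          (added at ordinal position 0 because the routine is a FUNCTION)
//   row 1: PARAMETER_NAME = "price", DATA_TYPE = "DECIMAL"
//   row 2: PARAMETER_NAME = "rate",  DATA_TYPE = "DECIMAL"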
Example 8: ParseConstraint
private static void ParseConstraint(DataTable fkTable, DataRow table,
MySqlTokenizer tokenizer, bool includeColumns)
{
string name = tokenizer.NextToken();
DataRow row = fkTable.NewRow();
// make sure this constraint is a FK
string token = tokenizer.NextToken();
if (token != "foreign" || tokenizer.Quoted)
return;
tokenizer.NextToken(); // read off the 'KEY' symbol
tokenizer.NextToken(); // read off the '(' symbol
row["CONSTRAINT_CATALOG"] = table["TABLE_CATALOG"];
row["CONSTRAINT_SCHEMA"] = table["TABLE_SCHEMA"];
row["TABLE_CATALOG"] = table["TABLE_CATALOG"];
row["TABLE_SCHEMA"] = table["TABLE_SCHEMA"];
row["TABLE_NAME"] = table["TABLE_NAME"];
row["REFERENCED_TABLE_CATALOG"] = null;
row["CONSTRAINT_NAME"] = name.Trim(new char[] { '\'', '`' });
ArrayList srcColumns = includeColumns ? ParseColumns(tokenizer) : null;
// now look for the references section
while (token != "references" || tokenizer.Quoted)
token = tokenizer.NextToken();
string target1 = tokenizer.NextToken();
string target2 = tokenizer.NextToken();
if (target2.StartsWith("."))
{
row["REFERENCED_TABLE_SCHEMA"] = target1;
row["REFERENCED_TABLE_NAME"] = target2.Substring(1).Trim(new char[] { '\'', '`' });
tokenizer.NextToken(); // read off the '('
}
else
{
row["REFERENCED_TABLE_SCHEMA"] = table["TABLE_SCHEMA"];
row["REFERENCED_TABLE_NAME"] = target1.Substring(1).Trim(new char[] { '\'', '`' });
}
// if we are supposed to include columns, read the target columns
ArrayList targetColumns = includeColumns ? ParseColumns(tokenizer) : null;
if (includeColumns)
ProcessColumns(fkTable, row, srcColumns, targetColumns);
else
fkTable.Rows.Add(row);
}
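For orientation, a hedged worked example of the constraint text this method consumes: one clause from a lower-cased SHOW CREATE TABLE body, with invented table and column names:

// after the caller has positioned the tokenizer on "constraint":
string clause =
    "constraint `fk_order_customer` foreign key (`customer_id`) " +
    "references `customers` (`id`)";

// resulting row values (catalog/schema columns are copied from the
// parent table's row):
//   CONSTRAINT_NAME       = "fk_order_customer"
//   REFERENCED_TABLE_NAME = "customers"
// and, when includeColumns is true, the source column list (`customer_id`)
// is paired with the referenced column list (`id`) by ProcessColumns.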
Example 9: ParseColumns
private static ArrayList ParseColumns(MySqlTokenizer tokenizer)
{
ArrayList sc = new ArrayList();
string token = tokenizer.NextToken();
while (token != ")")
{
if (token != ",")
sc.Add(token);
token = tokenizer.NextToken();
}
return sc;
}
Example 10: GetCommandTextForBatching
internal string GetCommandTextForBatching()
{
if (batchableCommandText == null)
{
// if the command starts with insert and is "simple" enough, then
// we can use the multi-value form of insert
if (String.Compare(CommandText.Substring(0, 6), "INSERT", true) == 0)
{
MySqlCommand cmd = new MySqlCommand("SELECT @@sql_mode", Connection);
string sql_mode = cmd.ExecuteScalar().ToString().ToUpper(CultureInfo.InvariantCulture);
MySqlTokenizer tokenizer = new MySqlTokenizer(CommandText);
tokenizer.AnsiQuotes = sql_mode.IndexOf("ANSI_QUOTES") != -1;
tokenizer.BackslashEscapes = sql_mode.IndexOf("NO_BACKSLASH_ESCAPES") == -1;
string token = tokenizer.NextToken().ToLower(CultureInfo.InvariantCulture);
while (token != null)
{
if (token.ToUpper(CultureInfo.InvariantCulture) == "VALUES" &&
!tokenizer.Quoted)
{
token = tokenizer.NextToken();
Debug.Assert(token == "(");
while (token != null && token != ")")
{
batchableCommandText += token;
token = tokenizer.NextToken();
}
if (token != null)
batchableCommandText += token;
token = tokenizer.NextToken();
if (token != null && (token == "," ||
token.ToUpper(CultureInfo.InvariantCulture) == "ON"))
{
batchableCommandText = null;
break;
}
}
token = tokenizer.NextToken();
}
}
}
return batchableCommandText;
}
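The batchable text computed here is used when batched updates are enabled on a data adapter. A hedged usage sketch; conn is an assumed open connection and booksTable a DataTable holding pending inserted rows:

var adapter = new MySqlDataAdapter("SELECT id, name, price FROM books", conn);
var builder = new MySqlCommandBuilder(adapter);   // generates the INSERT command
adapter.UpdateBatchSize = 100;   // allow up to 100 rows per round trip
adapter.Update(booksTable);      // simple INSERTs can then be collapsed into
                                 // one multi-value INSERT ... VALUES (...), (...)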
Example 11: GetForeignKeysOnTable
/// <summary>
/// GetForeignKeysOnTable retrieves the foreign keys on the given table.
/// Since MySQL supported foreign keys on versions prior to 5.0 (which introduced
/// information_schema), we can't rely on information_schema. MySQL also does not
/// provide any SHOW command for foreign keys, so we have to resort to using
/// SHOW CREATE TABLE and parsing the output.
/// </summary>
/// <param name="fkTable">The table to store the key info in.</param>
/// <param name="tableToParse">The table to get the foreign key info for.</param>
/// <param name="filterName">Only get foreign keys that match this name.</param>
/// <param name="includeColumns">Should column information be included in the table.</param>
private void GetForeignKeysOnTable(DataTable fkTable, DataRow tableToParse,
string filterName, bool includeColumns)
{
string sqlMode = GetSqlMode();
if (filterName != null)
filterName = filterName.ToLower(CultureInfo.InvariantCulture);
string sql = string.Format("SHOW CREATE TABLE `{0}`.`{1}`",
tableToParse["TABLE_SCHEMA"], tableToParse["TABLE_NAME"]);
string lowerBody = null, body = null;
MySqlCommand cmd = new MySqlCommand(sql, connection);
using (MySqlDataReader reader = cmd.ExecuteReader())
{
reader.Read();
body = reader.GetString(1);
lowerBody = body.ToLower(CultureInfo.InvariantCulture);
}
MySqlTokenizer tokenizer = new MySqlTokenizer(lowerBody);
tokenizer.AnsiQuotes = sqlMode.IndexOf("ANSI_QUOTES") != -1;
tokenizer.BackslashEscapes = sqlMode.IndexOf("NO_BACKSLASH_ESCAPES") == -1;
while (true)
{
string token = tokenizer.NextToken();
// look for a starting constraint
while (token != null && (token != "constraint" || tokenizer.Quoted))
token = tokenizer.NextToken();
if (token == null) break;
ParseConstraint(fkTable, tableToParse, tokenizer, includeColumns);
}
}
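The rows assembled here back the connector's "Foreign Keys" schema collection, which is the public way to get at this information. A hedged usage sketch; the connection string is a placeholder and the collection name should be checked against your connector version:

using System;
using System.Data;
using MySql.Data.MySqlClient;

using (var conn = new MySqlConnection("server=localhost;uid=user;pwd=pass;database=test"))
{
    conn.Open();
    DataTable fks = conn.GetSchema("Foreign Keys");
    foreach (DataRow r in fks.Rows)
        Console.WriteLine("{0}: {1} -> {2}",
            r["CONSTRAINT_NAME"], r["TABLE_NAME"], r["REFERENCED_TABLE_NAME"]);
}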
Example 12: PrepareCommandText
/// <summary>
/// Prepares CommandText for use with the Prepare method
/// </summary>
/// <returns>Command text stripped of all parameter names</returns>
/// <remarks>
/// Takes the output of TokenizeSql and creates a single string of SQL
/// that only contains '?' markers for each parameter. It also creates
/// the parameterMap list that includes all the parameter names in the
/// order they appeared in the SQL
/// </remarks>
private List<string> PrepareCommandText(out string stripped_sql)
{
StringBuilder newSQL = new StringBuilder();
List<string> parameterMap = new List<string>();
int startPos = 0;
string sql = ResolvedCommandText;
MySqlTokenizer tokenizer = new MySqlTokenizer(sql);
string parameter = tokenizer.NextParameter();
while (parameter != null)
{
if (parameter.IndexOf(StoredProcedure.ParameterPrefix) == -1)
{
newSQL.Append(sql.Substring(startPos, tokenizer.StartIndex - startPos));
newSQL.Append("?");
parameterMap.Add(parameter);
startPos = tokenizer.StopIndex;
}
parameter = tokenizer.NextParameter();
}
newSQL.Append(sql.Substring(startPos));
stripped_sql = newSQL.ToString();
return parameterMap;
}
Example 13: InternalBindParameters
private void InternalBindParameters(string sql, MySqlParameterCollection parameters,
MySqlPacket packet)
{
if (packet == null)
{
packet = new MySqlPacket(Driver.Encoding);
packet.Version = Driver.Version;
packet.WriteByte(0);
}
int startPos = 0;
MySqlTokenizer tokenizer = new MySqlTokenizer(sql);
tokenizer.ReturnComments = true;
string parameter = tokenizer.NextParameter();
while (parameter != null)
{
packet.WriteStringNoNull(sql.Substring(startPos, tokenizer.StartIndex - startPos));
bool serialized = SerializeParameter(parameters, packet, parameter);
startPos = tokenizer.StopIndex;
if (!serialized)
startPos = tokenizer.StartIndex;
parameter = tokenizer.NextParameter();
}
packet.WriteStringNoNull(sql.Substring(startPos));
buffers.Add(packet);
}
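The driving call here is NextParameter, which walks the SQL and stops on each parameter marker, with StartIndex/StopIndex bracketing the marker so the binder can splice in the value. MySqlTokenizer is an internal helper rather than public API, so the direct call below is only an illustrative sketch that assumes the class is accessible (for example, from within the connector assembly); the index values in the comment are illustrative:

var tokenizer = new MySqlTokenizer("SELECT * FROM t WHERE a = @a AND b = @b");
for (string p = tokenizer.NextParameter(); p != null; p = tokenizer.NextParameter())
    Console.WriteLine("{0} spans [{1}, {2})", p, tokenizer.StartIndex, tokenizer.StopIndex);
// expected output:
//   @a spans [26, 28)
//   @b spans [37, 39)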
Example 14: GetTargetedTable
private string GetTargetedTable(string sql)
{
MySqlTokenizer tokenizer = new MySqlTokenizer(sql);
tokenizer.ReturnComments = false;
tokenizer.AnsiQuotes = sql_mode.ToLowerInvariant().Contains("ansi_quotes");
tokenizer.BackslashEscapes = !sql_mode.ToLowerInvariant().Contains("no_backslash_escapes");
string token = null;
while (token != "ON" || tokenizer.Quoted)
token = tokenizer.NextToken();
string tableName = tokenizer.NextToken();
if (tokenizer.NextToken() == ".")
tableName = tokenizer.NextToken();
if (tableName.StartsWith("`", StringComparison.Ordinal))
return tableName.Trim('`');
if (tableName.StartsWith("\"", StringComparison.Ordinal) && tokenizer.AnsiQuotes)
return tableName.Trim('"');
return tableName;
}
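A hedged worked example of the scan; the trigger definition is invented, and the call treats the private method as if it were directly callable:

string sql =
    "CREATE TRIGGER trg_audit BEFORE INSERT ON `shop`.`orders` " +
    "FOR EACH ROW SET NEW.created_at = NOW()";
// the loop skips tokens until the unquoted ON, reads "`shop`", sees ".",
// then reads "`orders`"; the backticks are trimmed before returning
string table = GetTargetedTable(sql);   // "orders"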
Example 15: BreakIntoStatements
private List<ScriptStatement> BreakIntoStatements(bool ansiQuotes, bool noBackslashEscapes)
{
string currentDelimiter = Delimiter;
int startPos = 0;
List<ScriptStatement> statements = new List<ScriptStatement>();
List<int> lineNumbers = BreakScriptIntoLines();
MySqlTokenizer tokenizer = new MySqlTokenizer(query);
tokenizer.AnsiQuotes = ansiQuotes;
tokenizer.BackslashEscapes = !noBackslashEscapes;
string token = tokenizer.NextToken();
while (token != null)
{
if (!tokenizer.Quoted)
{
if (token.ToLower(CultureInfo.InvariantCulture) == "delimiter")
{
tokenizer.NextToken();
AdjustDelimiterEnd(tokenizer);
currentDelimiter = query.Substring(tokenizer.StartIndex,
tokenizer.StopIndex - tokenizer.StartIndex).Trim();
startPos = tokenizer.StopIndex;
}
else
{
// this handles the case where our tokenizer reads part of the
// delimiter
if (currentDelimiter.StartsWith(token, StringComparison.OrdinalIgnoreCase))
{
if ((tokenizer.StartIndex + currentDelimiter.Length) <= query.Length)
{
if (query.Substring(tokenizer.StartIndex, currentDelimiter.Length) == currentDelimiter)
{
token = currentDelimiter;
tokenizer.Position = tokenizer.StartIndex + currentDelimiter.Length;
tokenizer.StopIndex = tokenizer.Position;
}
}
}
int delimiterPos = token.IndexOf(currentDelimiter, StringComparison.OrdinalIgnoreCase);
if (delimiterPos != -1)
{
int endPos = tokenizer.StopIndex - token.Length + delimiterPos;
if (tokenizer.StopIndex == query.Length - 1)
endPos++;
string currentQuery = query.Substring(startPos, endPos - startPos);
ScriptStatement statement = new ScriptStatement();
statement.text = currentQuery.Trim();
statement.line = FindLineNumber(startPos, lineNumbers);
statement.position = startPos - lineNumbers[statement.line];
statements.Add(statement);
startPos = endPos + currentDelimiter.Length;
}
}
}
token = tokenizer.NextToken();
}
// now clean up the last statement
if (startPos < query.Length - 1)
{
string sqlLeftOver = query.Substring(startPos).Trim();
if (!String.IsNullOrEmpty(sqlLeftOver))
{
ScriptStatement statement = new ScriptStatement();
statement.text = sqlLeftOver;
statement.line = FindLineNumber(startPos, lineNumbers);
statement.position = startPos - lineNumbers[statement.line];
statements.Add(statement);
}
}
return statements;
}
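The public face of this splitter is MySqlScript, which honors DELIMITER directives exactly as parsed above. A hedged usage sketch; conn is an assumed open MySqlConnection:

var script = new MySqlScript(conn,
    "DELIMITER $$\n" +
    "CREATE PROCEDURE p() BEGIN SELECT 1; END$$\n" +
    "DELIMITER ;\n" +
    "SELECT 2;");
int executed = script.Execute();   // runs each parsed statement in turn
Console.WriteLine(executed);       // number of statements executed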