How to efficiently write to file from SQL datareader in c#?

前端 未结 7 1209
隐瞒了意图╮
隐瞒了意图╮ 2020-12-15 00:14

I have a remote SQL connection in C# that needs to execute a query and save its results to the user's local hard disk. There is a fairly large amount of data this thing ca

相关标签:
7条回答
  • 2020-12-15 00:52

    I came up with this, it's a better CSV writer than the other answers:

    /// <summary>
    /// Extension helpers for streaming an <see cref="IDataReader"/> out to disk as CSV.
    /// </summary>
    public static class DataReaderExtension
    {
        /// <summary>
        /// Writes every row of <paramref name="dataReader"/> to <paramref name="fileName"/>
        /// as comma-separated values, optionally preceded by a header row of column names.
        /// Fields are escaped per RFC 4180 (quotes doubled; fields containing a comma,
        /// quote, or newline are wrapped in quotes). DBNull values become empty fields.
        /// The reader is closed on success, matching the original behavior.
        /// </summary>
        /// <param name="dataReader">Open reader positioned before the first row.</param>
        /// <param name="fileName">Destination file path; overwritten if it exists.</param>
        /// <param name="includeHeaderAsFirstRow">Emit column names as the first line.</param>
        public static void ToCsv(this IDataReader dataReader, string fileName, bool includeHeaderAsFirstRow)
        {
            const string Separator = ",";

            // `using` guarantees the file handle is released even if reading throws;
            // the original leaked the StreamWriter on any exception.
            using (StreamWriter streamWriter = new StreamWriter(fileName))
            {
                if (includeHeaderAsFirstRow)
                {
                    StringBuilder header = new StringBuilder();
                    for (int index = 0; index < dataReader.FieldCount; index++)
                    {
                        if (index > 0)
                            header.Append(Separator);
                        header.Append(Escape(dataReader.GetName(index) ?? string.Empty));
                    }
                    streamWriter.WriteLine(header.ToString());
                }

                while (dataReader.Read())
                {
                    StringBuilder row = new StringBuilder();
                    // One pass over all columns; the original appended the last
                    // column a second time after this loop, duplicating the field.
                    for (int index = 0; index < dataReader.FieldCount; index++)
                    {
                        if (index > 0)
                            row.Append(Separator);
                        if (!dataReader.IsDBNull(index))
                            row.Append(Escape(dataReader.GetValue(index).ToString()));
                    }
                    streamWriter.WriteLine(row.ToString());
                }
            }
            dataReader.Close();
        }

        // RFC 4180 escaping: wrap the field in quotes when it contains a comma,
        // a quote, or a line break, doubling any embedded quotes. The original
        // doubled quotes but only quoted the field when it contained the
        // separator, which produced unparseable CSV for values with quotes.
        private static string Escape(string value)
        {
            if (value.IndexOfAny(new[] { ',', '"', '\r', '\n' }) < 0)
                return value;
            return "\"" + value.Replace("\"", "\"\"") + "\"";
        }
    }
    

    usage: mydataReader.ToCsv("myfile.csv", true)

    0 讨论(0)
  • 2020-12-15 00:52

    I used .CSV to export data from the database via a DataReader. In my project I read the DataReader and create the .CSV file manually: in a loop I read the DataReader and, for every row, append each cell value to a result string, using "," to separate columns and "\n" to separate rows. Finally I save the result string as result.csv.

    I suggest this high-performance extension. I tested it, and it quickly exported 600,000 rows as .CSV.

    0 讨论(0)
  • 2020-12-15 00:54

    Rob Sedgwick's answer is more like it, but it can be improved and simplified. This is how I did it:

    // NOTE(review): this snippet assumes an ASP.NET `response` object (presumably
    // HttpResponse) is in scope — it streams the CSV straight into the HTTP
    // response rather than to a local file; confirm against the hosting page.
    string separator = ";";
    string fieldDelimiter = "";
    bool useHeaders = true;
    // FIX: `first` and `line` were used below without ever being declared.
    bool first;
    string line;

    string connectionString = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";

    using (SqlConnection conn = new SqlConnection(connectionString))
    {
        using (SqlCommand cmd = conn.CreateCommand())
        {
            conn.Open();
            string query = @"SELECT whatever";

            cmd.CommandText = query;

            using (SqlDataReader reader = cmd.ExecuteReader())
            {
                // No rows at all: nothing to export.
                if (!reader.Read())
                {
                    return;
                }

                List<string> columnNames = GetColumnNames(reader);

                // Write headers if required.
                if (useHeaders)
                {
                    first = true;
                    foreach (string columnName in columnNames)
                    {
                        response.Write(first ? string.Empty : separator);
                        line = string.Format("{0}{1}{2}", fieldDelimiter, columnName, fieldDelimiter);
                        response.Write(line);
                        first = false;
                    }

                    response.Write("\n");
                }

                // Write all records. The first row was already consumed by the
                // emptiness check above, hence do/while rather than while.
                do
                {
                    first = true;
                    foreach (string columnName in columnNames)
                    {
                        response.Write(first ? string.Empty : separator);
                        // FIX: ADO.NET represents SQL NULL as DBNull.Value, never as
                        // a null reference, so the original `== null` test could
                        // never fire and NULLs were written as "" via DBNull.ToString().
                        object raw = reader[columnName];
                        string value = raw == null || raw is DBNull ? string.Empty : raw.ToString();
                        line = string.Format("{0}{1}{2}", fieldDelimiter, value, fieldDelimiter);
                        response.Write(line);
                        first = false;
                    }

                    response.Write("\n");
                }
                while (reader.Read());
            }
        }
    }
    

    And you need to have a function GetColumnNames:

    /// <summary>
    /// Collects the column names exposed by <paramref name="reader"/>, in ordinal order.
    /// </summary>
    List<string> GetColumnNames(IDataReader reader)
    {
        int fieldCount = reader.FieldCount;
        List<string> names = new List<string>(fieldCount);

        for (int ordinal = 0; ordinal < fieldCount; ordinal++)
        {
            names.Add(reader.GetName(ordinal));
        }

        return names;
    }
    
    0 讨论(0)
  • 2020-12-15 00:56

    Using the response object without a Response.Close() causes, at least in some instances, the HTML of the page writing out the data to be written to the file. If you use Response.Close(), the connection can be closed prematurely and cause an error while producing the file.

    It is recommended to use HttpApplication.CompleteRequest(); however, this appears to always cause the HTML to be written to the end of the file.

    I have tried the stream in conjunction with the response object and have had success in the development environment. I have not tried it in production yet.

    0 讨论(0)
  • 2020-12-15 01:02

    You are on the right track yourself. Use a loop with while(myReader.Read()){...} and write each record to the text file inside the loop. The .NET framework and operating system will take care of flushing the buffers to disk in an efficient way.

    // Stream query results straight to a text file, one line per row; the
    // framework handles buffer flushing efficiently.
    using(SqlConnection conn = new SqlConnection(connectionString))
    using(SqlCommand cmd = conn.CreateCommand())
    {
      conn.Open();
      cmd.CommandText = QueryLoader.ReadQueryFromFileWithBdateEdate(
        @"Resources\qrs\qryssysblo.q", newdate, newdate);

      using(SqlDataReader reader = cmd.ExecuteReader())
      // FIX: the path must be a verbatim string — in "c:\temp\file.txt" the
      // compiler interprets \t as a tab and \f as a form feed, producing a
      // broken path instead of the intended file name.
      using(StreamWriter writer = new StreamWriter(@"c:\temp\file.txt"))
      {
        while(reader.Read())
        {
          // Using Name and Phone as example columns.
          writer.WriteLine("Name: {0}, Phone : {1}", 
            reader["Name"], reader["Phone"]);
        }
      }
    }
    
    0 讨论(0)
  • 2020-12-15 01:03

    I agree that your best bet here would be to use a SqlDataReader. Something like this:

    // Stream the query results to a text file, one line per row.
    StreamWriter writer = new StreamWriter(@"c:\testfile.txt");
    SqlCommand command = new SqlCommand();
    SqlConnection connection = new SqlConnection(YourConnectionString);
    command.Connection = connection;
    command.CommandText = myQuery;
    
    connection.Open();
    
    // Stacked using statements dispose the writer, then the reader, then the
    // connection, even if an exception is thrown while reading.
    using (connection)
    using (SqlDataReader reader = command.ExecuteReader())
    using (writer)
    {
        while (reader.Read())
        {
            // Format the line however you need from the reader's columns.
            writer.WriteLine(reader[0].ToString() + reader[1].ToString() + ",");
        }
    }
    

    Mind you, in the while loop, you can write that line to the text file in any format you see fit with the column data from the SqlDataReader.

    0 讨论(0)
提交回复
热议问题