diff --git a/LINQtoCSV.Tests/CsvContextReadTests.cs b/LINQtoCSV.Tests/CsvContextReadTests.cs
index b8f57d8..761c783 100644
--- a/LINQtoCSV.Tests/CsvContextReadTests.cs
+++ b/LINQtoCSV.Tests/CsvContextReadTests.cs
@@ -1,15 +1,12 @@
-using LINQtoCSV;
-using Microsoft.VisualStudio.TestTools.UnitTesting;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
-using System.IO;
-using System.Collections.Generic;
namespace LINQtoCSV.Tests
{
- [TestClass()]
+ [TestClass]
public class CsvContextReadTests : Test
{
- [TestMethod()]
+ [TestMethod]
public void GoodFileUsingOutputFormatForParsingDatesCharUSEnglish()
{
// Arrange
@@ -18,9 +15,9 @@ public void GoodFileUsingOutputFormatForParsingDatesCharUSEnglish()
{
SeparatorChar = ';',
FirstLineHasColumnNames = false,
- UseOutputFormatForParsingCsvValue = true,
+ UseOutputFormatForParsingCsvValue = true,
EnforceCsvColumnAttribute = true, // default is false
- FileCultureName = "en-US" // default is the current culture
+ FileCultureName = "en-US", // default is the current culture
};
string testInput =
@@ -28,14 +25,18 @@ public void GoodFileUsingOutputFormatForParsingDatesCharUSEnglish()
"BBBBBBBB;051212" + Environment.NewLine +
"CCCCCCCC;122308";
- var expected = new[] {
- new ProductDataParsingOutputFormat() {
+ var expected = new[]
+ {
+ new ProductDataParsingOutputFormat
+ {
name = "AAAAAAAA", startDate = new DateTime(2008, 5, 23),
},
- new ProductDataParsingOutputFormat {
- name = "BBBBBBBB", startDate = new DateTime(2012, 5, 12),
+ new ProductDataParsingOutputFormat
+ {
+ name = "BBBBBBBB", startDate = new DateTime(2012, 5, 12),
},
- new ProductDataParsingOutputFormat {
+ new ProductDataParsingOutputFormat
+ {
name = "CCCCCCCC", startDate = new DateTime(2008, 12, 23),
}
};
@@ -45,7 +46,7 @@ public void GoodFileUsingOutputFormatForParsingDatesCharUSEnglish()
AssertRead(testInput, fileDescription_namesUs, expected);
}
- [TestMethod()]
+ [TestMethod]
public void GoodFileNoSeparatorCharUseOutputFormatForParsingUSEnglish()
{
// Arrange
@@ -64,14 +65,18 @@ public void GoodFileNoSeparatorCharUseOutputFormatForParsingUSEnglish()
BBBBBBBB10.31105/12/12\n
CCCCCCCC12.00012/23/08";
- var expected = new[] {
- new ProductDataCharLength() {
+ var expected = new[]
+ {
+ new ProductDataCharLength
+ {
name = "AAAAAAAA", weight = 34.184, startDate = new DateTime(2008, 5, 23),
},
- new ProductDataCharLength {
- name = "BBBBBBBB", weight = 10.311, startDate = new DateTime(2012, 5, 12),
+ new ProductDataCharLength
+ {
+ name = "BBBBBBBB", weight = 10.311, startDate = new DateTime(2012, 5, 12),
},
- new ProductDataCharLength {
+ new ProductDataCharLength
+ {
name = "CCCCCCCC", weight = 12.000, startDate = new DateTime(2008, 12, 23),
}
};
@@ -81,7 +86,7 @@ public void GoodFileNoSeparatorCharUseOutputFormatForParsingUSEnglish()
AssertRead(testInput, fileDescription_namesUs, expected);
}
- [TestMethod()]
+ [TestMethod]
public void GoodFileNoSeparatorCharUSEnglish()
{
// Arrange
@@ -100,14 +105,18 @@ public void GoodFileNoSeparatorCharUSEnglish()
BBBBBBBB10.31105/12/12\n
CCCCCCCC12.00012/23/08";
- var expected = new[] {
- new ProductDataCharLength() {
+ var expected = new[]
+ {
+ new ProductDataCharLength
+ {
name = "AAAAAAAA", weight = 34.184, startDate = new DateTime(2008, 5, 23),
},
- new ProductDataCharLength {
- name = "BBBBBBBB", weight = 10.311, startDate = new DateTime(2012, 5, 12),
+ new ProductDataCharLength
+ {
+ name = "BBBBBBBB", weight = 10.311, startDate = new DateTime(2012, 5, 12),
},
- new ProductDataCharLength {
+ new ProductDataCharLength
+ {
name = "CCCCCCCC", weight = 12.000, startDate = new DateTime(2008, 12, 23),
}
};
@@ -117,7 +126,7 @@ public void GoodFileNoSeparatorCharUSEnglish()
AssertRead(testInput, fileDescription_namesUs, expected);
}
- [TestMethod()]
+ [TestMethod]
public void GoodFileCommaDelimitedUseFieldIndexForReadingDataCharUSEnglish()
{
// Arrange
@@ -137,14 +146,18 @@ public void GoodFileCommaDelimitedUseFieldIndexForReadingDataCharUSEnglish()
"BBBBBBBB,__,10.311,05/12/12" + Environment.NewLine +
"CCCCCCCC,__,12.000,12/23/08";
- var expected = new[] {
- new ProductDataSpecificFieldIndex() {
+ var expected = new[]
+ {
+ new ProductDataSpecificFieldIndex
+ {
name = "AAAAAAAA", weight = 34.184, startDate = new DateTime(2008, 5, 23),
},
- new ProductDataSpecificFieldIndex {
- name = "BBBBBBBB", weight = 10.311, startDate = new DateTime(2012, 5, 12),
+ new ProductDataSpecificFieldIndex
+ {
+ name = "BBBBBBBB", weight = 10.311, startDate = new DateTime(2012, 5, 12),
},
- new ProductDataSpecificFieldIndex {
+ new ProductDataSpecificFieldIndex
+ {
name = "CCCCCCCC", weight = 12.000, startDate = new DateTime(2008, 12, 23),
}
};
@@ -154,7 +167,7 @@ public void GoodFileCommaDelimitedUseFieldIndexForReadingDataCharUSEnglish()
AssertRead(testInput, fileDescription_namesUs, expected);
}
- [TestMethod()]
+ [TestMethod]
public void GoodFileCommaDelimitedUseFieldIndexForReadingDataCharUseOutputFormatForParsingUSEnglish()
{
// Arrange
@@ -176,14 +189,18 @@ public void GoodFileCommaDelimitedUseFieldIndexForReadingDataCharUseOutputFormat
"BBBBBBBB,__,10.311,05/12/12" + Environment.NewLine +
"CCCCCCCC,__,12.000,12/23/08";
- var expected = new[] {
- new ProductDataSpecificFieldIndex() {
+ var expected = new[]
+ {
+ new ProductDataSpecificFieldIndex
+ {
name = "AAAAAAAA", weight = 34.184, startDate = new DateTime(2008, 5, 23),
},
- new ProductDataSpecificFieldIndex {
- name = "BBBBBBBB", weight = 10.311, startDate = new DateTime(2012, 5, 12),
+ new ProductDataSpecificFieldIndex
+ {
+ name = "BBBBBBBB", weight = 10.311, startDate = new DateTime(2012, 5, 12),
},
- new ProductDataSpecificFieldIndex {
+ new ProductDataSpecificFieldIndex
+ {
name = "CCCCCCCC", weight = 12.000, startDate = new DateTime(2008, 12, 23),
}
};
@@ -193,8 +210,7 @@ public void GoodFileCommaDelimitedUseFieldIndexForReadingDataCharUseOutputFormat
AssertRead(testInput, fileDescription_namesUs, expected);
}
-
- [TestMethod()]
+ [TestMethod]
public void GoodFileCommaDelimitedNamesInFirstLineUSEnglish()
{
// Arrange
@@ -216,13 +232,16 @@ two newlines
and a quoted """"string""""""
dog house, ""45,230,990"",29 Feb 2004, , -56, True,"""", FF10, ""12,008""";
- var expected = new [] {
- new ProductData {
+ var expected = new[]
+ {
+ new ProductData
+ {
name = "moonbuggy", weight = 34.184, startDate = new DateTime(2008, 5, 23), launchTime = new DateTime(2009, 5, 5, 16, 11, 0),
nbrAvailable = 1205, onsale = true, shopsAvailable = "Paris, New York", hexProductCode = 31, retailPrice = 540.12M,
description = "newly launched product"
},
- new ProductData {
+ new ProductData
+ {
name = "mouse trap", weight = 45E-5, startDate = new DateTime(1985, 1, 2), launchTime = new DateTime(1988, 8, 7, 0, 0, 0),
nbrAvailable = 4030, onsale = false, shopsAvailable = @"This field has
a newline", hexProductCode = 256, retailPrice = 78300M,
@@ -230,7 +249,8 @@ and a quoted """"string""""""
two newlines
and a quoted ""string"""
},
- new ProductData {
+ new ProductData
+ {
name = "dog house", weight = 45230990, startDate = new DateTime(2004, 2, 29), launchTime = default(DateTime),
nbrAvailable = -56, onsale = true, shopsAvailable = "", hexProductCode = 65296, retailPrice = 12008M,
description = null
@@ -242,7 +262,7 @@ and a quoted ""string"""
AssertRead(testInput, fileDescription_namesUs, expected);
}
- [TestMethod()]
+ [TestMethod]
public void GoodFileTabDelimitedNoNamesInFirstLineNLnl()
{
// Arrange
@@ -257,13 +277,16 @@ public void GoodFileTabDelimitedNoNamesInFirstLineNLnl()
string testInput =
"moonbuggy\t 23/5/08\t 5-Mei-2009 16:11 pm\t 34.184\t \"Paris, New York\"\t 1F\t €540,12\t true\t newly launched product\r\n\"mouse trap\"\t 2/1/1985\t \"7 Augustus 1988\t 0:00\"\t45E-5\t \"This field has\r\na newline\"\t 100\t \"€78.300\"\t FALSE\t \"This field has quotes(\"\"), and\r\ntwo newlines\r\nand a quoted \"\"string\"\"\"\r\ndog house\t29 Feb 2004\t \t \"45.230.990\"\t\"\"\t FF10\t \"12.008\"\t True";
- var expected = new[] {
- new ProductData {
+ var expected = new[]
+ {
+ new ProductData
+ {
name = "moonbuggy", weight = 34184, startDate = new DateTime(2008, 5, 23), launchTime = new DateTime(2009, 5, 5, 16, 11, 0),
nbrAvailable = 0, onsale = true, shopsAvailable = "Paris, New York", hexProductCode = 31, retailPrice = 540.12M,
description = "newly launched product"
},
- new ProductData {
+ new ProductData
+ {
name = "mouse trap", weight = 45E-5, startDate = new DateTime(1985, 1, 2), launchTime = new DateTime(1988, 8, 7, 0, 0, 0),
nbrAvailable = 0, onsale = false, shopsAvailable = @"This field has
a newline", hexProductCode = 256, retailPrice = 78300M,
@@ -271,7 +294,8 @@ public void GoodFileTabDelimitedNoNamesInFirstLineNLnl()
two newlines
and a quoted ""string"""
},
- new ProductData {
+ new ProductData
+ {
name = "dog house", weight = 45230990, startDate = new DateTime(2004, 2, 29), launchTime = default(DateTime),
nbrAvailable = 0, onsale = true, shopsAvailable = "", hexProductCode = 65296, retailPrice = 12008M,
description = null
@@ -283,7 +307,7 @@ and a quoted ""string"""
AssertRead(testInput, fileDescription_nonamesNl, expected);
}
- [TestMethod()]
+ [TestMethod]
public void GoodFileCommaDelimitedWithTrailingSeparatorChars()
{
// Arrange
@@ -306,13 +330,16 @@ two newlines
and a quoted """"string""""""
dog house, ""45,230,990"",29 Feb 2004, , -56, True,"""", FF10, ""12,008"",";
- var expected = new[] {
- new ProductData {
+ var expected = new[]
+ {
+ new ProductData
+ {
name = "moonbuggy", weight = 34.184, startDate = new DateTime(2008, 5, 23), launchTime = new DateTime(2009, 5, 5, 16, 11, 0),
nbrAvailable = 1205, onsale = true, shopsAvailable = "Paris, New York", hexProductCode = 31, retailPrice = 540.12M,
description = "newly launched product"
},
- new ProductData {
+ new ProductData
+ {
name = "mouse trap", weight = 45E-5, startDate = new DateTime(1985, 1, 2), launchTime = new DateTime(1988, 8, 7, 0, 0, 0),
nbrAvailable = 4030, onsale = false, shopsAvailable = @"This field has
a newline", hexProductCode = 256, retailPrice = 78300M,
@@ -320,7 +347,8 @@ and a quoted """"string""""""
two newlines
and a quoted ""string"""
},
- new ProductData {
+ new ProductData
+ {
name = "dog house", weight = 45230990, startDate = new DateTime(2004, 2, 29), launchTime = default(DateTime),
nbrAvailable = -56, onsale = true, shopsAvailable = "", hexProductCode = 65296, retailPrice = 12008M,
description = null
@@ -332,17 +360,18 @@ and a quoted ""string"""
AssertRead(testInput, fileDescription_namesUs, expected);
}
- [TestMethod()]
- public void FileWithUnknownColumns_ShouldDiscardColumns() {
+ [TestMethod]
+ public void FileWithUnknownColumns_ShouldDiscardColumns()
+ {
var description = new CsvFileDescription
- {
- SeparatorChar = ',',
- FirstLineHasColumnNames = true,
- IgnoreUnknownColumns = true,
- };
-
+ {
+ SeparatorChar = ',',
+ FirstLineHasColumnNames = true,
+ IgnoreUnknownColumns = true,
+ };
+
//The following input has 5 columns: Id | Name | Last Name | Age | City. Only the Name, Last Name and Age will be read.
-
+
string input =
@"Id,Name,Last Name,Age,City
1,John,Doe,15,Washington
@@ -365,7 +394,6 @@ public void FileWithUnknownColumns_ShouldDiscardColumns() {
};
AssertRead(input, description, expected);
-
}
}
-}
+}
\ No newline at end of file
diff --git a/LINQtoCSV.Tests/CsvContextWriteTests.cs b/LINQtoCSV.Tests/CsvContextWriteTests.cs
index aebc0a1..919692f 100644
--- a/LINQtoCSV.Tests/CsvContextWriteTests.cs
+++ b/LINQtoCSV.Tests/CsvContextWriteTests.cs
@@ -1,16 +1,14 @@
-using LINQtoCSV;
-using Microsoft.VisualStudio.TestTools.UnitTesting;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
-using System.IO;
using System.Collections.Generic;
using System.Text;
namespace LINQtoCSV.Tests
{
- [TestClass()]
+ [TestClass]
public class CsvContextWriteTests : Test
{
- [TestMethod()]
+ [TestMethod]
public void GoodFileCommaDelimitedNamesInFirstLineNLnl()
{
// Arrange
diff --git a/LINQtoCSV/CsvColumnAttribute.cs b/LINQtoCSV/CsvColumnAttribute.cs
index a1c259d..558f827 100644
--- a/LINQtoCSV/CsvColumnAttribute.cs
+++ b/LINQtoCSV/CsvColumnAttribute.cs
@@ -3,23 +3,26 @@
namespace LINQtoCSV
{
-
///
- /// Summary description for CsvColumnAttribute
+ /// When applied to a field or property, allows the framework to recognize it from a CSV file, according to the rules specified.
///
-
- [System.AttributeUsage(System.AttributeTargets.Field | System.AttributeTargets.Property,
- AllowMultiple = false)
- ]
- public class CsvColumnAttribute : System.Attribute
+ [AttributeUsage(AttributeTargets.Field | AttributeTargets.Property, AllowMultiple = false)]
+ public class CsvColumnAttribute : Attribute
{
- internal const int mc_DefaultFieldIndex = Int32.MaxValue;
+ internal const int mc_DefaultFieldIndex = int.MaxValue;
public string Name { get; set; }
+
public bool CanBeNull { get; set; }
+
public int FieldIndex { get; set; }
+
public NumberStyles NumberStyle { get; set; }
+
+ public DateTimeStyles DateTimeStyle { get; set; }
+
public string OutputFormat { get; set; }
+
public int CharLength { get; set; }
public CsvColumnAttribute()
@@ -28,24 +31,27 @@ public CsvColumnAttribute()
FieldIndex = mc_DefaultFieldIndex;
CanBeNull = true;
NumberStyle = NumberStyles.Any;
+ DateTimeStyle = DateTimeStyles.AssumeUniversal | DateTimeStyles.AllowWhiteSpaces;
OutputFormat = "G";
}
public CsvColumnAttribute(
- string name,
- int fieldIndex,
+ string name,
+ int fieldIndex,
bool canBeNull,
string outputFormat,
NumberStyles numberStyle,
+ DateTimeStyles dateTimeStyle,
int charLength)
{
Name = name;
FieldIndex = fieldIndex;
CanBeNull = canBeNull;
NumberStyle = numberStyle;
+ DateTimeStyle = dateTimeStyle;
OutputFormat = outputFormat;
CharLength = charLength;
}
}
-}
+}
\ No newline at end of file
diff --git a/LINQtoCSV/CsvContext.cs b/LINQtoCSV/CsvContext.cs
index 62d799c..92e7fe8 100644
--- a/LINQtoCSV/CsvContext.cs
+++ b/LINQtoCSV/CsvContext.cs
@@ -4,86 +4,73 @@
namespace LINQtoCSV
{
-
///
- /// Summary description for CsvContext
+ /// A context provider for reading values from CSV files.
///
public class CsvContext
{
- /// ///////////////////////////////////////////////////////////////////////
- /// Read
- ///
+ #region Read
+
///
- /// Reads the comma separated values from a stream or file.
- /// Returns the data into an IEnumerable that can be used for LINQ queries.
- ///
- /// The stream or file will be closed after the last line has been processed.
- /// Because the library implements deferred reading (using Yield Return), this may not happen
- /// for a while.
+ /// Reads the comma separated values from a stream or file and returns the data into an
+ /// that can be used for LINQ queries.
///
///
- /// The records in the returned IEnumerable will be of this type.
+ /// The records in the returned will be of this type.
///
///
- /// The data will be read from this stream.
- ///
- ///
- /// Additional information how the input file is to be interpreted, such as the culture of the input dates.
+ /// All data is read from this stream, unless fileName is not null.
+ ///
+ /// This is a rather than a ,
+ /// because we need to be able to seek back to the start of the
+ /// stream, and you can't do that with a (or ).
+ ///
///
///
/// Values read from the stream or file.
///
- public IEnumerable Read(string fileName, CsvFileDescription fileDescription) where T : class, new()
- {
- // Note that ReadData will not be called right away, but when the returned
- // IEnumerable actually gets accessed.
-
- IEnumerable ie = ReadData(fileName, null, fileDescription);
- return ie;
- }
+ ///
+ /// The stream or file will not be closed after the last line has been processed.
+ /// Because the library implements deferred reading (using Yield Return), please be careful
+ /// about closing the stream reader.
+ ///
public IEnumerable Read(StreamReader stream) where T : class, new()
{
return Read(stream, new CsvFileDescription());
}
- public IEnumerable Read(string fileName) where T : class, new()
- {
- return Read(fileName, new CsvFileDescription());
- }
-
- public IEnumerable Read(StreamReader stream, CsvFileDescription fileDescription) where T : class, new()
- {
- return ReadData(null, stream, fileDescription);
- }
-
- /// ///////////////////////////////////////////////////////////////////////
- /// ReadData
///
- ///
+ /// Reads the comma separated values from a stream or file and returns the data into an
+ /// that can be used for LINQ queries.
///
- ///
- ///
- /// Name of the file associated with the stream.
- ///
- /// If this is not null, a file is opened with this name.
- /// If this is null, the method attempts to read from the passed in stream.
- ///
+ ///
+ /// The records in the returned will be of this type.
+ ///
///
- /// All data is read from this stream, unless fileName is not null.
- ///
- /// This is a StreamReader rather then a TextReader,
+ /// All data is read from this stream, unless fileName is not null.
+ ///
+ /// This is a rather than a ,
/// because we need to be able to seek back to the start of the
- /// stream, and you can't do that with a TextReader (or StringReader).
+ /// stream, and you can't do that with a (or ).
+ ///
+ ///
+ ///
+ /// Additional information how the input file is to be interpreted, such as the culture of the input dates.
///
- ///
- ///
- private IEnumerable ReadData(
- string fileName,
- StreamReader stream,
+ ///
+ /// Values read from the stream or file.
+ ///
+ ///
+ /// The stream or file will not be closed after the last line has been processed.
+ /// Because the library implements deferred reading (using Yield Return), please be careful
+ /// about closing the stream reader.
+ ///
+ public IEnumerable Read(
+ StreamReader stream,
CsvFileDescription fileDescription) where T : class, new()
{
- // If T implements IDataRow, then we're reading raw data rows
+ // If T implements IDataRow, then we're reading raw data rows
bool readingRawDataRows = typeof(IDataRow).IsAssignableFrom(typeof(T));
// The constructor for FieldMapper_Reading will throw an exception if there is something
@@ -96,40 +83,28 @@ private IEnumerable ReadData(
if (!readingRawDataRows)
{
- fm = new FieldMapper_Reading(fileDescription, fileName, false);
+ fm = new FieldMapper_Reading(fileDescription, false);
}
// -------
- // Each time the IEnumerable that is returned from this method is
+ // Each time the IEnumerable that is returned from this method is
// accessed in a foreach, ReadData is called again (not the original Read overload!)
//
// So, open the file here, or rewind the stream.
- bool readingFile = !string.IsNullOrEmpty(fileName);
+ // Rewind the stream
- if (readingFile)
+ if ((stream == null) || (!stream.BaseStream.CanSeek))
{
- stream = new StreamReader(
- fileName,
- fileDescription.TextEncoding,
- fileDescription.DetectEncodingFromByteOrderMarks);
+ throw new BadStreamException();
}
- else
- {
- // Rewind the stream
- if ((stream == null) || (!stream.BaseStream.CanSeek))
- {
- throw new BadStreamException();
- }
-
- stream.BaseStream.Seek(0, SeekOrigin.Begin);
- }
+ stream.BaseStream.Seek(0, SeekOrigin.Begin);
// ----------
CsvStream cs = new CsvStream(stream, null, fileDescription.SeparatorChar, fileDescription.IgnoreTrailingSeparatorChar);
-
+
// If we're reading raw data rows, instantiate a T so we return objects
// of the type specified by the caller.
// Otherwise, instantiate a DataRow, which also implements IDataRow.
@@ -144,7 +119,7 @@ private IEnumerable ReadData(
}
AggregatedException ae =
- new AggregatedException(typeof(T).ToString(), fileName, fileDescription.MaximumNbrExceptions);
+ new AggregatedException(typeof(T).ToString(), fileDescription.MaximumNbrExceptions);
try
{
@@ -160,9 +135,9 @@ private IEnumerable ReadData(
// Skip empty lines.
// Important. If there is a newline at the end of the last data line, the code
// thinks there is an empty line after that last data line.
- if ((row.Count == 1) &&
+ if ((row.Count == 1) &&
((row[0].Value == null) ||
- (string.IsNullOrEmpty(row[0].Value.Trim())) ))
+ (string.IsNullOrEmpty(row[0].Value.Trim()))))
{
continue;
}
@@ -188,13 +163,13 @@ private IEnumerable ReadData(
catch (AggregatedException ae2)
{
// Seeing that the AggregatedException was thrown, maximum number of exceptions
- // must have been reached, so rethrow.
+ // must have been reached, so re-throw.
// Catch here, so you don't add an AggregatedException to an AggregatedException
throw ae2;
}
catch (Exception e)
{
- // Store the exception in the AggregatedException ae.
+ // Store the exception in the AggregatedException "ae".
// That way, if a file has many errors leading to exceptions,
// you get them all in one go, packaged in a single aggregated exception.
ae.AddException(e);
@@ -207,64 +182,31 @@ private IEnumerable ReadData(
}
finally
{
- if (readingFile)
- {
- stream.Close();
- }
-
// If any exceptions were raised while reading the data from the file,
- // they will have been stored in the AggregatedException ae.
- // In that case, time to throw ae.
+ // they will have been stored in the AggregatedException "ae".
+ // In that case, time to throw "ae".
ae.ThrowIfExceptionsStored();
}
}
+ #endregion Read
+
/// ///////////////////////////////////////////////////////////////////////
/// Write
- ///
- public void Write(
- IEnumerable values,
- string fileName,
- CsvFileDescription fileDescription)
- {
- using (StreamWriter sw = new StreamWriter(
- fileName,
- false,
- fileDescription.TextEncoding))
- {
- WriteData(values, fileName, sw, fileDescription);
- }
- }
-
+ ///
public void Write(
IEnumerable values,
- TextWriter stream)
+ TextWriter stream)
{
- Write(values, stream, new CsvFileDescription());
- }
-
- public void Write(
- IEnumerable values,
- string fileName)
- {
- Write(values, fileName, new CsvFileDescription());
+ Write(values, stream, new CsvFileDescription());
}
public void Write(
IEnumerable values,
TextWriter stream,
- CsvFileDescription fileDescription)
- {
- WriteData(values, null, stream, fileDescription);
- }
-
- private void WriteData(
- IEnumerable values,
- string fileName,
- TextWriter stream,
- CsvFileDescription fileDescription)
+ CsvFileDescription fileDescription)
{
- FieldMapper fm = new FieldMapper(fileDescription, fileName, true);
+ FieldMapper fm = new FieldMapper(fileDescription, true);
CsvStream cs = new CsvStream(null, stream, fileDescription.SeparatorChar, fileDescription.IgnoreTrailingSeparatorChar);
List row = new List();
@@ -280,20 +222,10 @@ private void WriteData(
foreach (T obj in values)
{
- // Convert obj to row
+ // Convert object to row
fm.WriteObject(obj, row);
cs.WriteRow(row, fileDescription.QuoteAllFields);
}
}
-
- /// ///////////////////////////////////////////////////////////////////////
- /// CsvContext
- ///
- ///
- ///
- ///
- public CsvContext()
- {
- }
}
-}
+}
\ No newline at end of file
diff --git a/LINQtoCSV/CsvFileDescription.cs b/LINQtoCSV/CsvFileDescription.cs
index b983ca8..0141259 100644
--- a/LINQtoCSV/CsvFileDescription.cs
+++ b/LINQtoCSV/CsvFileDescription.cs
@@ -1,11 +1,10 @@
-using System;
-using System.Globalization;
+using System.Globalization;
using System.Text;
namespace LINQtoCSV
{
///
- /// Summary description for CsvFileDescription
+ /// Descriptors for a readable CSV file.
///
public class CsvFileDescription
{
@@ -14,67 +13,117 @@ public class CsvFileDescription
private int m_maximumNbrExceptions = 100;
- // --------------
-
- // Character used to separate fields in the file.
- // By default, this is comma (,).
- // For a tab delimited file, you would set this to
- // the tab character ('\t').
+ ///
+ /// Gets or sets the character used to separate fields in the file.
+ /// By default, this is comma (,).
+ /// For a tab delimited file, you would set this to
+ /// the tab character ('\t').
+ ///
+ ///
+ /// The separator character.
+ ///
public char SeparatorChar { get; set; }
+ ///
+ /// Gets or sets a value indicating whether there is no separator character.
+ ///
+ ///
+ /// true if there is no separator character; otherwise, false.
+ ///
public bool NoSeparatorChar { get; set; }
- // Only used when writing a file
- //
- // If true, all fields are quoted whatever their content.
- // If false, only fields containing a FieldSeparator character,
- // a quote or a newline are quoted.
- //
+ ///
+ /// Gets or sets a value indicating whether all fields are quoted, whatever their content.
+ /// If false, only fields containing a FieldSeparator character, a quote or a newline are quoted.
+ /// Only used when writing a file.
+ ///
+ ///
+ /// true if quoting all fields; otherwise, false.
+ ///
public bool QuoteAllFields { get; set; }
- // If true, then:
- // When writing a file, the column names are written in the
- // first line of the new file.
- // When reading a file, the column names are read from the first
- // line of the file.
- //
+ ///
+ /// Gets or sets a value indicating whether the column names are written in the
+ /// first line of the new file when writing, or whether the column names are read
+ /// from the first line of the file when reading.
+ ///
+ ///
+ /// true if first line has or should write column names; otherwise, false.
+ ///
public bool FirstLineHasColumnNames { get; set; }
- // If true, only public fields and properties with the
- // [CsvColumn] attribute are recognized.
- // If false, all public fields and properties are used.
- //
+ ///
+ /// Gets or sets a value indicating whether only public fields and properties with the
+ /// attribute are recognized. If false, all public fields
+ /// and properties are used.
+ ///
+ ///
+ /// true if enforcing CSV column attribute; otherwise, false.
+ ///
public bool EnforceCsvColumnAttribute { get; set; }
- // FileCultureName and FileCultureInfo both get/set
- // the CultureInfo used for the file.
- // For example, if the file uses Dutch date and number formats
- // while the current culture is US English, set
- // FileCultureName to "nl-NL".
- //
- // To simply use the current culture, leave the culture as is.
- //
+ ///
+ /// Gets or sets the name of the file culture. FileCultureName
+ /// and FileCultureInfo both get/set the CultureInfo used for
+ /// the file.
+ ///
+ ///
+ /// For example, if the file uses Dutch date and number
+ /// formats while the current culture is US English, set
+ /// FileCultureName to "nl-NL". To simply use the current
+ /// culture, leave the culture as is.
+ ///
+ ///
+ /// The name of the file culture.
+ ///
public string FileCultureName
{
get { return m_cultureInfo.Name; }
set { m_cultureInfo = new CultureInfo(value); }
}
+ ///
+ /// Gets or sets the info of the file culture. FileCultureName
+ /// and FileCultureInfo both get/set the CultureInfo used for
+ /// the file.
+ ///
+ ///
+ /// For example, if the file uses Dutch date and number
+ /// formats while the current culture is US English, set
+ /// FileCultureName to "nl-NL". To simply use the current
+ /// culture, leave the culture as is.
+ ///
+ ///
+ /// The information of the file culture.
+ ///
public CultureInfo FileCultureInfo
{
get { return m_cultureInfo; }
set { m_cultureInfo = value; }
}
- // When reading a file, exceptions thrown while the file is being read
- // are captured in an aggregate exception. That aggregate exception is then
- // thrown at the end - to make it easier to solve multiple problems with the
- // input file in one.
- //
- // However, after MaximumNbrExceptions, the aggregate exception is thrown
- // immediately.
- //
- // To not have a maximum at all, set this to -1.
+ ///
+ /// Gets or sets the maximum number of exceptions before the AggregateException
+ /// is thrown.
+ ///
+ ///
+ ///
+ /// When reading a file, exceptions thrown while the file is being read
+ /// are captured in an aggregate exception. That aggregate exception is then
+ /// thrown at the end - to make it easier to solve multiple problems with the
+ /// input file in one.
+ ///
+ ///
+ /// However, after MaximumNbrExceptions, the aggregate exception is thrown
+ /// immediately.
+ ///
+ ///
+ /// To not have a maximum at all, set this to -1.
+ ///
+ ///
+ ///
+ /// The maximum number of exceptions.
+ ///
public int MaximumNbrExceptions
{
get { return m_maximumNbrExceptions; }
@@ -92,7 +141,7 @@ public int MaximumNbrExceptions
public bool IgnoreTrailingSeparatorChar { get; set; }
///
- /// If set to true, wil read only the fields specified as attributes, and will discard other fields in the CSV file
+ /// If set to true, will read only the fields specified as attributes, and will discard other fields in the CSV file
///
public bool IgnoreUnknownColumns { get; set; }
diff --git a/LINQtoCSV/Exceptions.cs b/LINQtoCSV/Exceptions.cs
index 56df4b0..1a72220 100644
--- a/LINQtoCSV/Exceptions.cs
+++ b/LINQtoCSV/Exceptions.cs
@@ -21,13 +21,6 @@ public LINQtoCSVException(
string message) : base(message)
{
}
-
- // ----
-
- public static string FileNameMessage(string fileName)
- {
- return ((fileName == null) ? "" : " Reading file \"" + fileName + "\".");
- }
}
/// ///////////////////////////////////////////////////////////////////////
@@ -141,16 +134,14 @@ public ToBeWrittenButMissingFieldIndexException(
///
public class NameNotInTypeException : LINQtoCSVException
{
- public NameNotInTypeException(string typeName, string fieldName, string fileName) :
+ public NameNotInTypeException(string typeName, string fieldName) :
base(string.Format(
- "The input file has column name \"{0}\" in the first record, but there is no field or property with that name in type \"{1}\"." +
- FileNameMessage(fileName),
+ "The input file has column name \"{0}\" in the first record, but there is no field or property with that name in type \"{1}\".",
fieldName,
typeName))
{
Data["TypeName"] = typeName;
Data["FieldName"] = fieldName;
- Data["FileName"] = fileName;
}
}
@@ -161,16 +152,14 @@ public NameNotInTypeException(string typeName, string fieldName, string fileName
///
public class MissingCsvColumnAttributeException : LINQtoCSVException
{
- public MissingCsvColumnAttributeException(string typeName, string fieldName, string fileName):
+ public MissingCsvColumnAttributeException(string typeName, string fieldName):
base(string.Format(
- "Field \"{0}\" in type \"{1}\" does not have the CsvColumn attribute." +
- FileNameMessage(fileName),
+ "Field \"{0}\" in type \"{1}\" does not have the CsvColumn attribute.",
fieldName,
typeName))
{
Data["TypeName"] = typeName;
Data["FieldName"] = fieldName;
- Data["FileName"] = fileName;
}
}
@@ -185,16 +174,14 @@ public MissingCsvColumnAttributeException(string typeName, string fieldName, str
///
public class TooManyDataFieldsException : LINQtoCSVException
{
- public TooManyDataFieldsException(string typeName, int lineNbr, string fileName):
+ public TooManyDataFieldsException(string typeName, int lineNbr):
base(string.Format(
- "Line {0} has more fields then are available in type \"{1}\"." +
- FileNameMessage(fileName),
+ "Line {0} has more fields then are available in type \"{1}\".",
lineNbr,
typeName))
{
Data["TypeName"] = typeName;
Data["LineNbr"] = lineNbr;
- Data["FileName"] = fileName;
}
}
@@ -207,16 +194,14 @@ public TooManyDataFieldsException(string typeName, int lineNbr, string fileName)
///
public class TooManyNonCsvColumnDataFieldsException : LINQtoCSVException
{
- public TooManyNonCsvColumnDataFieldsException(string typeName, int lineNbr, string fileName):
+ public TooManyNonCsvColumnDataFieldsException(string typeName, int lineNbr):
base(string.Format(
- "Line {0} has more fields then there are fields or properties in type \"{1}\" with the CsvColumn attribute set." +
- FileNameMessage(fileName),
+ "Line {0} has more fields then there are fields or properties in type \"{1}\" with the CsvColumn attribute set.",
lineNbr,
typeName))
{
Data["TypeName"] = typeName;
Data["LineNbr"] = lineNbr;
- Data["FileName"] = fileName;
}
}
@@ -230,38 +215,34 @@ public TooManyNonCsvColumnDataFieldsException(string typeName, int lineNbr, stri
///
public class MissingFieldIndexException : LINQtoCSVException
{
- public MissingFieldIndexException(string typeName, int lineNbr, string fileName):
+ public MissingFieldIndexException(string typeName, int lineNbr):
base(string.Format(
- "Line {0} has more fields then there are fields or properties in type \"{1}\" with a FieldIndex." +
- FileNameMessage(fileName),
+ "Line {0} has more fields than there are fields or properties in type \"{1}\" with a FieldIndex.",
lineNbr,
typeName))
{
Data["TypeName"] = typeName;
Data["LineNbr"] = lineNbr;
- Data["FileName"] = fileName;
}
}
///
/// Thrown when a type field/property has no corresponding field in the data for the corresponding FieldIndex.
- /// This means that the FieldIndex valus is greater then the number of items in a data.
+ /// This means that the FieldIndex value is greater than the number of items in the data.
///
/// All WrongFieldIndexException get aggregated into
/// an AggregatedException.
///
public class WrongFieldIndexException : LINQtoCSVException
{
- public WrongFieldIndexException(string typeName, int lineNbr, string fileName) :
+ public WrongFieldIndexException(string typeName, int lineNbr) :
base(string.Format(
- "Line {0} has less fields then the FieldIndex value is indicating in type \"{1}\" ." +
- FileNameMessage(fileName),
+ "Line {0} has fewer fields than the FieldIndex value is indicating in type \"{1}\".",
lineNbr,
typeName))
{
Data["TypeName"] = typeName;
Data["LineNbr"] = lineNbr;
- Data["FileName"] = fileName;
}
}
@@ -276,19 +257,16 @@ public class MissingRequiredFieldException : LINQtoCSVException
public MissingRequiredFieldException(
string typeName,
string fieldName,
- int lineNbr,
- string fileName):
+ int lineNbr):
base(
string.Format(
- "In line {0}, no value provided for required field or property \"{1}\" in type \"{2}\"." +
- FileNameMessage(fileName),
+ "In line {0}, no value provided for required field or property \"{1}\" in type \"{2}\".",
lineNbr,
fieldName,
typeName))
{
Data["TypeName"] = typeName;
Data["LineNbr"] = lineNbr;
- Data["FileName"] = fileName;
Data["FieldName"] = fieldName;
}
}
@@ -306,12 +284,10 @@ public WrongDataFormatException(
string fieldName,
string fieldValue,
int lineNbr,
- string fileName,
Exception innerExc):
base(
string.Format(
- "Value \"{0}\" in line {1} has the wrong format for field or property \"{2}\" in type \"{3}\"." +
- FileNameMessage(fileName),
+ "Value \"{0}\" in line {1} has the wrong format for field or property \"{2}\" in type \"{3}\".",
fieldValue,
lineNbr,
fieldName,
@@ -320,7 +296,6 @@ public WrongDataFormatException(
{
Data["TypeName"] = typeName;
Data["LineNbr"] = lineNbr;
- Data["FileName"] = fileName;
Data["FieldValue"] = fieldValue;
Data["FieldName"] = fieldName;
}
@@ -339,17 +314,15 @@ public class AggregatedException : LINQtoCSVException
// -----
- public AggregatedException(string typeName, string fileName, int maximumNbrExceptions):
+ public AggregatedException(string typeName, int maximumNbrExceptions):
base(string.Format(
- "There were 1 or more exceptions while reading data using type \"{0}\"." +
- FileNameMessage(fileName),
+ "There were 1 or more exceptions while reading data using type \"{0}\".",
typeName))
{
m_MaximumNbrExceptions = maximumNbrExceptions;
m_InnerExceptionsList = new List();
Data["TypeName"] = typeName;
- Data["FileName"] = fileName;
Data["InnerExceptionsList"] = m_InnerExceptionsList;
}
diff --git a/LINQtoCSV/FieldMapper.cs b/LINQtoCSV/FieldMapper.cs
index 9ef9dfc..4907ee6 100644
--- a/LINQtoCSV/FieldMapper.cs
+++ b/LINQtoCSV/FieldMapper.cs
@@ -1,46 +1,38 @@
using System;
using System.Collections.Generic;
using System.Globalization;
-using System.ComponentModel;
using System.Linq;
using System.Reflection;
namespace LINQtoCSV
{
- internal class FieldMapper
+ internal class FieldMapper
{
protected class TypeFieldInfo : IComparable
{
public int index = CsvColumnAttribute.mc_DefaultFieldIndex;
- public string name = null;
+ public string name;
public bool canBeNull = true;
public NumberStyles inputNumberStyle = NumberStyles.Any;
- public string outputFormat = null;
+ public DateTimeStyles inputDateTimeStyle = DateTimeStyles.AssumeUniversal | DateTimeStyles.AllowWhiteSpaces;
+ public string outputFormat;
public bool hasColumnAttribute = false;
- public MemberInfo memberInfo = null;
- public Type fieldType = null;
+ public MemberInfo memberInfo;
+ public Type fieldType;
- // parseNumberMethod will remain null if the property is not a numeric type.
- // This would be the case for DateTime, Boolean, String and custom types.
- // In those cases, just use a TypeConverter.
- //
- // DateTime and Boolean also have Parse methods, but they don't provide
- // functionality that TypeConverter doesn't give you.
-
- public TypeConverter typeConverter = null;
- public MethodInfo parseNumberMethod = null;
+ public MethodInfo parseNumberMethod;
+ public MethodInfo parseBoolMethod;
+ public MethodInfo parseDateTimeMethod;
public MethodInfo parseExactMethod;
public int charLength = 0;
- // ----
-
public int CompareTo(TypeFieldInfo other)
{
return index.CompareTo(other.index);
}
- public override string ToString()
+ public override string ToString()
{
return string.Format("Index: {0}, Name: {1}", index, name);
}
@@ -55,16 +47,13 @@ public override string ToString()
///
/// Contains a mapping between the CSV column indexes that will read and the property indexes in the business object.
///
- protected IDictionary _mappingIndexes = new Dictionary();
+ protected IDictionary _mappingIndexes = new Dictionary();
// Used to build IndexToInfo
protected Dictionary m_NameToInfo = null;
protected CsvFileDescription m_fileDescription;
- // Only used when throwing an exception
- protected string m_fileName;
-
// -----------------------------
// AnalyzeTypeField
//
@@ -95,21 +84,28 @@ private TypeFieldInfo AnalyzeTypeField(
tfi.parseNumberMethod =
tfi.fieldType.GetMethod("Parse",
- new Type[] { typeof(String), typeof(NumberStyles), typeof(IFormatProvider) });
+ new Type[] { typeof(string), typeof(NumberStyles), typeof(IFormatProvider) });
if (tfi.parseNumberMethod == null)
{
- if (m_fileDescription.UseOutputFormatForParsingCsvValue)
- {
- tfi.parseExactMethod = tfi.fieldType.GetMethod("ParseExact",
- new Type[] {typeof (string), typeof (string), typeof (IFormatProvider)});
- }
+ tfi.parseDateTimeMethod =
+ tfi.fieldType.GetMethod("Parse",
+ new Type[] { typeof(string), typeof(IFormatProvider), typeof(DateTimeStyles) });
- tfi.typeConverter = null;
- if (tfi.parseExactMethod == null)
+ if (tfi.parseDateTimeMethod == null)
{
- tfi.typeConverter =
- TypeDescriptor.GetConverter(tfi.fieldType);
+ tfi.parseBoolMethod =
+ tfi.fieldType.GetMethod("Parse",
+ new Type[] { typeof(string) });
+
+ if (tfi.parseBoolMethod == null)
+ {
+ if (m_fileDescription.UseOutputFormatForParsingCsvValue)
+ {
+ tfi.parseExactMethod = tfi.fieldType.GetMethod("ParseExact",
+ new Type[] { typeof(string), typeof(string), typeof(IFormatProvider) });
+ }
+ }
}
}
@@ -118,12 +114,11 @@ private TypeFieldInfo AnalyzeTypeField(
tfi.index = CsvColumnAttribute.mc_DefaultFieldIndex;
tfi.name = mi.Name;
- tfi.inputNumberStyle = NumberStyles.Any;
tfi.outputFormat = "";
tfi.hasColumnAttribute = false;
tfi.charLength = 0;
- foreach (Object attribute in mi.GetCustomAttributes(typeof(CsvColumnAttribute), true))
+ foreach (object attribute in mi.GetCustomAttributes(typeof(CsvColumnAttribute), true))
{
CsvColumnAttribute cca = (CsvColumnAttribute)attribute;
@@ -137,11 +132,10 @@ private TypeFieldInfo AnalyzeTypeField(
tfi.canBeNull = cca.CanBeNull;
tfi.outputFormat = cca.OutputFormat;
tfi.inputNumberStyle = cca.NumberStyle;
+ tfi.inputDateTimeStyle = cca.DateTimeStyle;
tfi.charLength = cca.CharLength;
}
- // -----
-
if (allCsvColumnFieldsMustHaveFieldIndex &&
tfi.hasColumnAttribute &&
tfi.index == CsvColumnAttribute.mc_DefaultFieldIndex)
@@ -160,8 +154,6 @@ private TypeFieldInfo AnalyzeTypeField(
tfi.name);
}
- // -----
-
return tfi;
}
@@ -169,7 +161,7 @@ private TypeFieldInfo AnalyzeTypeField(
// AnalyzeType
//
protected void AnalyzeType(
- Type type,
+ Type type,
bool allRequiredFieldsMustHaveFieldIndex,
bool allCsvColumnFieldsMustHaveFieldIndex)
{
@@ -178,22 +170,19 @@ protected void AnalyzeType(
// ------
// Initialize NameToInfo
- foreach (MemberInfo mi in type.GetMembers(BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static ))
+ // Only process field and property members.
+ foreach (MemberInfo mi in type.GetProperties(BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static).Cast()
+ .Union(type.GetFields(BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static)))
{
- // Only process field and property members.
- if ((mi.MemberType == MemberTypes.Field) ||
- (mi.MemberType == MemberTypes.Property))
- {
- // Note that the compiler does not allow fields and/or properties
- // with the same name as some other field or property.
- TypeFieldInfo tfi =
- AnalyzeTypeField(
- mi,
- allRequiredFieldsMustHaveFieldIndex,
- allCsvColumnFieldsMustHaveFieldIndex);
-
- m_NameToInfo[tfi.name] = tfi;
- }
+ // Note that the compiler does not allow fields and/or properties
+ // with the same name as some other field or property.
+ TypeFieldInfo tfi =
+ AnalyzeTypeField(
+ mi,
+ allRequiredFieldsMustHaveFieldIndex,
+ allCsvColumnFieldsMustHaveFieldIndex);
+
+ m_NameToInfo[tfi.name] = tfi;
}
// -------
@@ -203,22 +192,22 @@ protected void AnalyzeType(
m_IndexToInfo = new TypeFieldInfo[nbrTypeFields];
_mappingIndexes = new Dictionary();
-
- int i=0;
+
+ int i = 0;
foreach (KeyValuePair kvp in m_NameToInfo)
{
m_IndexToInfo[i++] = kvp.Value;
}
- // Sort by FieldIndex. Fields without FieldIndex will
+ // Sort by FieldIndex. Fields without FieldIndex will
// be sorted towards the back, because their FieldIndex
// is Int32.MaxValue.
//
- // The sort order is important when reading a file that
+ // The sort order is important when reading a file that
// doesn't have the field names in the first line, and when
- // writing a file.
+ // writing a file.
//
- // Note that for reading from a file with field names in the
+ // Note that for reading from a file with field names in the
// first line, method ReadNames reworks IndexToInfo.
Array.Sort(m_IndexToInfo);
@@ -228,11 +217,11 @@ protected void AnalyzeType(
// However, allow gaps in the FieldIndex range, to make it easier to later insert
// fields in the range.
- int lastFieldIndex = Int32.MinValue;
+ int lastFieldIndex = int.MinValue;
string lastName = "";
- foreach(TypeFieldInfo tfi in m_IndexToInfo)
+ foreach (TypeFieldInfo tfi in m_IndexToInfo)
{
- if ((tfi.index == lastFieldIndex) &&
+ if ((tfi.index == lastFieldIndex) &&
(tfi.index != CsvColumnAttribute.mc_DefaultFieldIndex))
{
throw new DuplicateFieldIndexException(
@@ -249,12 +238,12 @@ protected void AnalyzeType(
/// ///////////////////////////////////////////////////////////////////////
/// FieldMapper
- ///
+ ///
///
/// Constructor
///
///
- public FieldMapper(CsvFileDescription fileDescription, string fileName, bool writingFile)
+ public FieldMapper(CsvFileDescription fileDescription, bool writingFile)
{
if ((!fileDescription.FirstLineHasColumnNames) &&
(!fileDescription.EnforceCsvColumnAttribute))
@@ -265,23 +254,22 @@ public FieldMapper(CsvFileDescription fileDescription, string fileName, bool wri
// ---------
m_fileDescription = fileDescription;
- m_fileName = fileName;
m_NameToInfo = new Dictionary();
AnalyzeType(
- typeof(T),
+ typeof(T),
!fileDescription.FirstLineHasColumnNames,
writingFile && !fileDescription.FirstLineHasColumnNames);
}
/// ///////////////////////////////////////////////////////////////////////
/// WriteNames
- ///
+ ///
///
/// Writes the field names given in T to row.
///
- ///
+ ///
public void WriteNames(List row)
{
row.Clear();
@@ -296,16 +284,13 @@ public void WriteNames(List row)
continue;
}
- // ----
-
row.Add(tfi.name);
}
}
-
/// ///////////////////////////////////////////////////////////////////////
/// WriteObject
- ///
+ ///
public void WriteObject(T obj, List row)
{
row.Clear();
@@ -320,9 +305,7 @@ public void WriteObject(T obj, List row)
continue;
}
- // ----
-
- Object objValue = null;
+ object objValue = null;
if (tfi.memberInfo is PropertyInfo)
{
@@ -353,8 +336,6 @@ public void WriteObject(T obj, List row)
}
}
- // -----
-
row.Add(resultString);
}
}
@@ -372,24 +353,21 @@ public void WriteObject(T obj, List row)
{
/// ///////////////////////////////////////////////////////////////////////
/// FieldMapper
- ///
+ ///
///
/// Constructor
///
///
public FieldMapper_Reading(
- CsvFileDescription fileDescription,
- string fileName,
+ CsvFileDescription fileDescription,
bool writingFile)
- : base(fileDescription, fileName, writingFile)
+ : base(fileDescription, writingFile)
{
-
}
-
/// ///////////////////////////////////////////////////////////////////////
/// ReadNames
- ///
+ ///
///
/// Assumes that the fields in parameter row are field names.
/// Reads the names into the objects internal structure.
@@ -397,7 +375,6 @@ public FieldMapper_Reading(
///
///
///
- ///
public void ReadNames(IDataRow row)
{
// It is now the order of the field names that determines
@@ -405,39 +382,43 @@ public void ReadNames(IDataRow row)
// the FieldIndex fields.
// If there are more names in the file then fields in the type,
- // and IgnoreUnknownColumns is set to `false` one of the names will
+ // and IgnoreUnknownColumns is set to `false` one of the names will
// not be found, causing an exception.
int currentNameIndex = 0;
- for (int i = 0; i < row.Count; i++) {
- if (!m_NameToInfo.ContainsKey(row[i].Value)) {
+ for (int i = 0; i < row.Count; i++)
+ {
+ if (!m_NameToInfo.ContainsKey(row[i].Value))
+ {
//If we have to ignore this column
- if (m_fileDescription.IgnoreUnknownColumns) {
+ if (m_fileDescription.IgnoreUnknownColumns)
+ {
continue;
}
// name not found
- throw new NameNotInTypeException(typeof (T).ToString(), row[i].Value, m_fileName);
+ throw new NameNotInTypeException(typeof(T).ToString(), row[i].Value);
}
- // ----
-
//Map the column index in the CSV file with the column index of the business object.
_mappingIndexes.Add(i, currentNameIndex);
currentNameIndex++;
}
- //Loop to the
- for (int i = 0; i < row.Count; i++) {
- if (!_mappingIndexes.ContainsKey(i)) {
+ //Loop over the columns again, assigning each mapped column's TypeFieldInfo.
+ for (int i = 0; i < row.Count; i++)
+ {
+ if (!_mappingIndexes.ContainsKey(i))
+ {
continue;
}
m_IndexToInfo[_mappingIndexes[i]] = m_NameToInfo[row[i].Value];
- if (m_fileDescription.EnforceCsvColumnAttribute && (!m_IndexToInfo[i].hasColumnAttribute)) {
- // enforcing column attr, but this field/prop has no column attr.
- throw new MissingCsvColumnAttributeException(typeof (T).ToString(), row[i].Value, m_fileName);
+ if (m_fileDescription.EnforceCsvColumnAttribute && (!m_IndexToInfo[i].hasColumnAttribute))
+ {
+ // enforcing column attribute, but this field/prop has no column attribute.
+ throw new MissingCsvColumnAttributeException(typeof(T).ToString(), row[i].Value);
}
}
}
@@ -452,80 +433,80 @@ public List GetCharLengths()
/// ///////////////////////////////////////////////////////////////////////
/// ReadObject
- ///
+ ///
///
/// Creates an object of type T from the data in row and returns that object.
- ///
+ ///
///
///
///
///
- public T ReadObject(IDataRow row, AggregatedException ae) {
+ public T ReadObject(IDataRow row, AggregatedException ae)
+ {
//If there are more columns than the required
if (row.Count > m_IndexToInfo.Length)
{
//Are we ignoring unknown columns?
- if (!m_fileDescription.IgnoreUnknownColumns) {
+ if (!m_fileDescription.IgnoreUnknownColumns)
+ {
// Too many fields
- throw new TooManyDataFieldsException(typeof (T).ToString(), row[0].LineNbr, m_fileName);
+ throw new TooManyDataFieldsException(typeof(T).ToString(), row[0].LineNbr);
}
}
- // -----
-
T obj = new T();
//If we will be using the mappings, we just iterate through all the cells in this row
int maxRowCount = _mappingIndexes.Count > 0 ? row.Count : Math.Min(row.Count, m_IndexToInfo.Length);
- for (int i = 0; i < maxRowCount; i++) {
+ for (int i = 0; i < maxRowCount; i++)
+ {
TypeFieldInfo tfi;
//If there is some index mapping generated and the IgnoreUnknownColums is `true`
- if (m_fileDescription.IgnoreUnknownColumns && _mappingIndexes.Count > 0) {
- if (!_mappingIndexes.ContainsKey(i)) {
+ if (m_fileDescription.IgnoreUnknownColumns && _mappingIndexes.Count > 0)
+ {
+ if (!_mappingIndexes.ContainsKey(i))
+ {
continue;
}
tfi = m_IndexToInfo[_mappingIndexes[i]];
}
- else {
+ else
+ {
tfi = m_IndexToInfo[i];
}
if (m_fileDescription.EnforceCsvColumnAttribute &&
(!tfi.hasColumnAttribute))
{
- // enforcing column attr, but this field/prop has no column attr.
+ // enforcing column attribute, but this field/prop has no column attribute.
// So there are too many fields in this record.
- throw new TooManyNonCsvColumnDataFieldsException(typeof(T).ToString(), row[i].LineNbr, m_fileName);
+ throw new TooManyNonCsvColumnDataFieldsException(typeof(T).ToString(), row[i].LineNbr);
}
- // -----
-
if ((!m_fileDescription.FirstLineHasColumnNames) &&
(tfi.index == CsvColumnAttribute.mc_DefaultFieldIndex))
{
- // First line in the file does not have field names, so we're
+ // First line in the file does not have field names, so we're
// depending on the FieldIndex of each field in the type
// to ensure each value is placed in the correct field.
// However, now hit a field where there is no FieldIndex.
- throw new MissingFieldIndexException(typeof(T).ToString(), row[i].LineNbr, m_fileName);
+ throw new MissingFieldIndexException(typeof(T).ToString(), row[i].LineNbr);
}
- // -----
-
if (m_fileDescription.UseFieldIndexForReadingData && (!m_fileDescription.FirstLineHasColumnNames) &&
(tfi.index > row.Count))
{
- // First line in the file does not have field names, so we're
+ // First line in the file does not have field names, so we're
// depending on the FieldIndex of each field in the type
// to ensure each value is placed in the correct field.
// However, now hit a field where the FieldIndex is bigger
// than the total number of items in a row generated by the separatorChar
- throw new WrongFieldIndexException(typeof(T).ToString(), row[i].LineNbr, m_fileName);
+ throw new WrongFieldIndexException(typeof(T).ToString(), row[i].LineNbr);
}
int index = m_fileDescription.UseFieldIndexForReadingData ? tfi.index - 1 : i;
-
+
// value to put in the object
string value = row[index].Value;
@@ -537,51 +518,66 @@ public T ReadObject(IDataRow row, AggregatedException ae) {
new MissingRequiredFieldException(
typeof(T).ToString(),
tfi.name,
- row[i].LineNbr,
- m_fileName));
+ row[i].LineNbr));
}
}
else
{
try
{
- Object objValue = null;
+ object objValue = null;
- // Normally, either tfi.typeConverter is not null,
- // or tfi.parseNumberMethod is not null.
- //
- if (tfi.typeConverter != null)
- {
- objValue = tfi.typeConverter.ConvertFromString(
- null,
- m_fileDescription.FileCultureInfo,
- value);
- }
- else if (tfi.parseExactMethod != null)
+ if (tfi.parseExactMethod != null)
{
objValue =
tfi.parseExactMethod.Invoke(
tfi.fieldType,
- new Object[] {
- value,
- tfi.outputFormat,
- m_fileDescription.FileCultureInfo });
- }
+ new object[]
+ {
+ value,
+ tfi.outputFormat,
+ m_fileDescription.FileCultureInfo
+ });
+ }
else if (tfi.parseNumberMethod != null)
{
objValue =
tfi.parseNumberMethod.Invoke(
tfi.fieldType,
- new Object[] {
- value,
- tfi.inputNumberStyle,
- m_fileDescription.FileCultureInfo });
+ new object[]
+ {
+ value,
+ tfi.inputNumberStyle,
+ m_fileDescription.FileCultureInfo
+ });
+ }
+ else if (tfi.parseDateTimeMethod != null)
+ {
+ objValue =
+ tfi.parseDateTimeMethod.Invoke(
+ tfi.fieldType,
+ new object[]
+ {
+ value,
+ m_fileDescription.FileCultureInfo,
+ tfi.inputDateTimeStyle
+ });
+ }
+ else if (tfi.parseBoolMethod != null)
+ {
+ objValue =
+ tfi.parseBoolMethod.Invoke(
+ tfi.fieldType,
+ new object[]
+ {
+ value
+ });
}
else
{
// No TypeConverter and no Parse method available.
// Try direct approach.
- objValue = value;
+ objValue = value;
}
if (tfi.memberInfo is PropertyInfo)
@@ -607,7 +603,6 @@ public T ReadObject(IDataRow row, AggregatedException ae) {
tfi.name,
value,
row[i].LineNbr,
- m_fileName,
e);
}
@@ -633,12 +628,11 @@ public T ReadObject(IDataRow row, AggregatedException ae) {
new MissingRequiredFieldException(
typeof(T).ToString(),
tfi.name,
- row[row.Count - 1].LineNbr,
- m_fileName));
+ row[row.Count - 1].LineNbr));
}
}
return obj;
}
}
-}
+}
\ No newline at end of file
diff --git a/LINQtoCSV/LINQtoCSV.csproj b/LINQtoCSV/LINQtoCSV.csproj
index 17323b9..a8de16a 100644
--- a/LINQtoCSV/LINQtoCSV.csproj
+++ b/LINQtoCSV/LINQtoCSV.csproj
@@ -10,13 +10,25 @@
Properties
LINQtoCSV
LINQtoCSV
- v3.5
+ v4.0
512
- 3.5
+ 4.0
+ false
+
+
+
+
+
+
+
+
+ {786C830F-07A1-408B-BD7F-6EE04809D6DB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}
+ Profile328
+ 10.0
publish\
true
Disk
@@ -29,17 +41,8 @@
true
0
1.0.0.%2a
- false
false
true
-
-
-
-
-
-
-
-
true
@@ -60,20 +63,6 @@
4
AllRules.ruleset
-
-
-
- 3.5
-
-
- 3.5
-
-
- 3.5
-
-
-
-
@@ -103,7 +92,7 @@
true
-
+