Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CachedCsvReader.CsvPropertyDescriptor.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CachedCsvReader.CsvPropertyDescriptor.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CachedCsvReader.CsvPropertyDescriptor.cs (revision 3110) @@ -0,0 +1,125 @@ +// LumenWorks.Framework.IO.CSV.CachedCsvReader.CsvPropertyDescriptor +// Copyright (c) 2006 S�bastien Lorion +// +// MIT license (http://en.wikipedia.org/wiki/MIT_License) +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do so, +// subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +using System; +using System.ComponentModel; + +namespace LumenWorks.Framework.IO.Csv +{ + public partial class CachedCsvReader + : CsvReader + { + /// + /// Represents a CSV field property descriptor. + /// + private class CsvPropertyDescriptor + : PropertyDescriptor + { + #region Fields + + /// + /// Contains the field index. + /// + private int _index; + + #endregion + + #region Constructors + + /// + /// Initializes a new instance of the CsvPropertyDescriptor class. + /// + /// The field name. + /// The field index. + public CsvPropertyDescriptor(string fieldName, int index) + : base(fieldName, null) + { + _index = index; + } + + #endregion + + #region Properties + + /// + /// Gets the field index. + /// + /// The field index. 
+ public int Index + { + get { return _index; } + } + + #endregion + + #region Overrides + + public override bool CanResetValue(object component) + { + return false; + } + + public override object GetValue(object component) + { + return ((string[]) component)[_index]; + } + + public override void ResetValue(object component) + { + } + + public override void SetValue(object component, object value) + { + } + + public override bool ShouldSerializeValue(object component) + { + return false; + } + + public override Type ComponentType + { + get + { + return typeof(CachedCsvReader); + } + } + + public override bool IsReadOnly + { + get + { + return true; + } + } + + public override Type PropertyType + { + get + { + return typeof(string); + } + } + + #endregion + } + } +} Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Properties/AssemblyInfo.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Properties/AssemblyInfo.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Properties/AssemblyInfo.cs (revision 3110) @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("LumenWorks.Framework.IO")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("LumenWorks.Framework.IO")] +[assembly: AssemblyCopyright("Copyright © 2005 Sébastien Lorion")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] +[assembly: System.CLSCompliant(true)] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. 
+[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("a8577790-f852-47a5-94ec-ec5fdf155a55")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Revision and Build Numbers +// by using the '*' as shown below: +[assembly: AssemblyVersion("3.8.0.0")] +[assembly: AssemblyFileVersion("3.8.0.0")] Index: DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/Deltares.LayerOnSlopeTool.Data.csproj =================================================================== diff -u -r3102 -r3110 --- DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/Deltares.LayerOnSlopeTool.Data.csproj (.../Deltares.LayerOnSlopeTool.Data.csproj) (revision 3102) +++ DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/Deltares.LayerOnSlopeTool.Data.csproj (.../Deltares.LayerOnSlopeTool.Data.csproj) (revision 3110) @@ -44,8 +44,13 @@ + + + + + @@ -56,6 +61,11 @@ Copying.AGPL.licenseheader - + + + {e3b83d68-c90c-4874-8e5f-dec8d038bc7c} + LumenWorks.Framework.IO + + \ No newline at end of file Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/LumenWorks.Framework.snk =================================================================== diff -u Binary files differ Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/Resources/ExceptionMessage.Designer.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/Resources/ExceptionMessage.Designer.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/Resources/ExceptionMessage.Designer.cs (revision 3110) @@ -0,0 +1,207 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by a tool. +// Runtime Version:2.0.50727.42 +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. +// +//------------------------------------------------------------------------------ + +namespace LumenWorks.Framework.IO.Csv.Resources { + using System; + + + /// + /// A strongly-typed resource class, for looking up localized strings, etc. + /// + // This class was auto-generated by the StronglyTypedResourceBuilder + // class via a tool like ResGen or Visual Studio. + // To add or remove a member, edit your .ResX file then rerun ResGen + // with the /str option, or rebuild your VS project. + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "2.0.0.0")] + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] + internal class ExceptionMessage { + + private static global::System.Resources.ResourceManager resourceMan; + + private static global::System.Globalization.CultureInfo resourceCulture; + + [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")] + internal ExceptionMessage() { + } + + /// + /// Returns the cached ResourceManager instance used by this class. 
+ /// + [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] + internal static global::System.Resources.ResourceManager ResourceManager { + get { + if (object.ReferenceEquals(resourceMan, null)) { + global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("LumenWorks.Framework.IO.Csv.Resources.ExceptionMessage", typeof(ExceptionMessage).Assembly); + resourceMan = temp; + } + return resourceMan; + } + } + + /// + /// Overrides the current thread's CurrentUICulture property for all + /// resource lookups using this strongly typed resource class. + /// + [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] + internal static global::System.Globalization.CultureInfo Culture { + get { + return resourceCulture; + } + set { + resourceCulture = value; + } + } + + /// + /// Looks up a localized string similar to Buffer size must be 1 or more.. + /// + internal static string BufferSizeTooSmall { + get { + return ResourceManager.GetString("BufferSizeTooSmall", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Cannot move to a previous record in forward-only mode.. + /// + internal static string CannotMovePreviousRecordInForwardOnly { + get { + return ResourceManager.GetString("CannotMovePreviousRecordInForwardOnly", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Cannot read record at index '{0}'.. + /// + internal static string CannotReadRecordAtIndex { + get { + return ResourceManager.GetString("CannotReadRecordAtIndex", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Enumeration has either not started or has already finished.. + /// + internal static string EnumerationFinishedOrNotStarted { + get { + return ResourceManager.GetString("EnumerationFinishedOrNotStarted", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Collection was modified; enumeration operation may not execute.. + /// + internal static string EnumerationVersionCheckFailed { + get { + return ResourceManager.GetString("EnumerationVersionCheckFailed", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to '{0}' field header not found.. + /// + internal static string FieldHeaderNotFound { + get { + return ResourceManager.GetString("FieldHeaderNotFound", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Field index must be included in [0, FieldCount[. Specified field index was : '{0}'.. + /// + internal static string FieldIndexOutOfRange { + get { + return ResourceManager.GetString("FieldIndexOutOfRange", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to The CSV appears to be corrupt near record '{0}' field '{1} at position '{2}'. Current raw data : '{3}'.. + /// + internal static string MalformedCsvException { + get { + return ResourceManager.GetString("MalformedCsvException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to '{0}' is not a supported missing field action.. + /// + internal static string MissingFieldActionNotSupported { + get { + return ResourceManager.GetString("MissingFieldActionNotSupported", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to No current record.. 
+ /// + internal static string NoCurrentRecord { + get { + return ResourceManager.GetString("NoCurrentRecord", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to The CSV does not have headers (CsvReader.HasHeaders property is false).. + /// + internal static string NoHeaders { + get { + return ResourceManager.GetString("NoHeaders", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to The number of fields in the record is greater than the available space from index to the end of the destination array.. + /// + internal static string NotEnoughSpaceInArray { + get { + return ResourceManager.GetString("NotEnoughSpaceInArray", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to '{0}' is not a valid ParseErrorAction while inside a ParseError event.. + /// + internal static string ParseErrorActionInvalidInsideParseErrorEvent { + get { + return ResourceManager.GetString("ParseErrorActionInvalidInsideParseErrorEvent", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to '{0}' is not a supported ParseErrorAction.. + /// + internal static string ParseErrorActionNotSupported { + get { + return ResourceManager.GetString("ParseErrorActionNotSupported", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to This operation is invalid when the reader is closed.. + /// + internal static string ReaderClosed { + get { + return ResourceManager.GetString("ReaderClosed", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Record index must be 0 or more.. + /// + internal static string RecordIndexLessThanZero { + get { + return ResourceManager.GetString("RecordIndexLessThanZero", resourceCulture); + } + } + } +} Index: DamTools/LayerOnSlopeTool/trunk/src/LayerOnSlopeTool.sln =================================================================== diff -u -r3108 -r3110 --- DamTools/LayerOnSlopeTool/trunk/src/LayerOnSlopeTool.sln (.../LayerOnSlopeTool.sln) (revision 3108) +++ DamTools/LayerOnSlopeTool/trunk/src/LayerOnSlopeTool.sln (.../LayerOnSlopeTool.sln) (revision 3110) @@ -48,6 +48,8 @@ {0D7CF73B-3587-4296-A343-7AAB962D22B6} = {0D7CF73B-3587-4296-A343-7AAB962D22B6} EndProjectSection EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LumenWorks.Framework.IO", "Tools\LumenWorks.Framework.IO\LumenWorks.Framework.IO.csproj", "{E3B83D68-C90C-4874-8E5F-DEC8D038BC7C}" +EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Deltares.LayerOnSlopeTool.Io", "Deltares.LayerOnSlopeTool.Io\Deltares.LayerOnSlopeTool.Io.csproj", "{62171502-3F8D-45CB-992A-45C5AC0F8CE6}" ProjectSection(ProjectDependencies) = postProject {0D7CF73B-3587-4296-A343-7AAB962D22B6} = {0D7CF73B-3587-4296-A343-7AAB962D22B6} @@ -93,6 +95,9 @@ {83D297FB-CFC7-4154-AEB7-5E09E10E24D0}.Debug|x86.Build.0 = Debug|x86 {83D297FB-CFC7-4154-AEB7-5E09E10E24D0}.Release|x86.ActiveCfg = Release|x86 {83D297FB-CFC7-4154-AEB7-5E09E10E24D0}.Release|x86.Build.0 = Release|x86 + {E3B83D68-C90C-4874-8E5F-DEC8D038BC7C}.Debug|x86.ActiveCfg = Debug|x86 + {E3B83D68-C90C-4874-8E5F-DEC8D038BC7C}.Debug|x86.Build.0 = Debug|x86 + {E3B83D68-C90C-4874-8E5F-DEC8D038BC7C}.Release|x86.ActiveCfg = Release|x86 {62171502-3F8D-45CB-992A-45C5AC0F8CE6}.Debug|x86.ActiveCfg = Debug|x86 {62171502-3F8D-45CB-992A-45C5AC0F8CE6}.Debug|x86.Build.0 = Debug|x86 {62171502-3F8D-45CB-992A-45C5AC0F8CE6}.Release|x86.ActiveCfg = Release|x86 Index: 
DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CharacteristicPointCsvIdentifiers.cs
===================================================================
diff -u
--- DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CharacteristicPointCsvIdentifiers.cs	(revision 0)
+++ DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CharacteristicPointCsvIdentifiers.cs	(revision 3110)
@@ -0,0 +1,89 @@
+// Copyright (C) Stichting Deltares 2020. All rights reserved.
+//
+// This file is part of the LayerOnSlopeTool
+//
+// The LayerOnSlopeTool is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see .
+//
+// All names, logos, and references to "Deltares" are registered trademarks of
+// Stichting Deltares and remain full property of Stichting Deltares at all times.
+// All rights reserved.
+
+namespace Deltares.LayerOnSlopeTool.Data.CsvImporters
+{
+    /// <summary>
+    /// Holds the Identifiers for the characteristic points as defined in csv files
+    /// </summary>
+    public class CharacteristicPointCsvIdentifiers
+    {
+        // The following 2 fields point to the same data
+        // location_id is the preferred column name
+        // Profielnaam is used for backward compatibility
+        public const string SurfaceLineId = "Profielnaam"; public const string LocationId = "LocationId";
+        public const string SurfaceLevelInsideX = "X_Maaiveld binnenwaarts";
+        public const string SurfaceLevelInsideY = "Y_Maaiveld binnenwaarts";
+        public const string SurfaceLevelInsideZ = "Z_Maaiveld binnenwaarts";
+        public const string DitchPolderSideX = "X_Insteek sloot polderzijde";
+        public const string DitchPolderSideY = "Y_Insteek sloot polderzijde";
+        public const string DitchPolderSideZ = "Z_Insteek sloot polderzijde";
+        public const string BottomDitchPolderSideX = "X_Slootbodem polderzijde";
+        public const string BottomDitchPolderSideY = "Y_Slootbodem polderzijde";
+        public const string BottomDitchPolderSideZ = "Z_Slootbodem polderzijde";
+        public const string BottomDitchDikeSideX = "X_Slootbodem dijkzijde";
+        public const string BottomDitchDikeSideY = "Y_Slootbodem dijkzijde";
+        public const string BottomDitchDikeSideZ = "Z_Slootbodem dijkzijde";
+        public const string DitchDikeSideX = "X_Insteek sloot dijkzijde";
+        public const string DitchDikeSideY = "Y_Insteek sloot dijkzijde";
+        public const string DitchDikeSideZ = "Z_Insteek sloot dijkzijde";
+        public const string DikeToeAtPolderX = "X_Teen dijk binnenwaarts";
+        public const string DikeToeAtPolderY = "Y_Teen dijk binnenwaarts";
+        public const string DikeToeAtPolderZ = "Z_Teen dijk binnenwaarts";
+        public const string TopShoulderInsideX = "X_Kruin binnenberm";
+        public const string TopShoulderInsideY = "Y_Kruin binnenberm";
+        public const string TopShoulderInsideZ = "Z_Kruin binnenberm";
+        public const string InsteekShoulderInsideX = "X_Insteek binnenberm";
+        public const string InsteekShoulderInsideY = "Y_Insteek binnenberm";
+        public const string InsteekShoulderInsideZ = "Z_Insteek binnenberm";
+        public const string DikeTopAtPolderX = "X_Kruin binnentalud";
+        public const string DikeTopAtPolderY = "Y_Kruin binnentalud";
+        public const string DikeTopAtPolderZ = "Z_Kruin binnentalud";
+        public const string TrafficLoadInsideX = "X_Verkeersbelasting kant binnenwaarts";
+        public const string TrafficLoadInsideY = "Y_Verkeersbelasting kant binnenwaarts";
+        public const string TrafficLoadInsideZ = "Z_Verkeersbelasting kant binnenwaarts";
+        public const string TrafficLoadOutsideX = "X_Verkeersbelasting kant buitenwaarts";
+        public const string TrafficLoadOutsideY = "Y_Verkeersbelasting kant buitenwaarts";
+        public const string TrafficLoadOutsideZ = "Z_Verkeersbelasting kant buitenwaarts";
+        public const string DikeTopAtRiverX = "X_Kruin buitentalud";
+        public const string DikeTopAtRiverY = "Y_Kruin buitentalud";
+        public const string DikeTopAtRiverZ = "Z_Kruin buitentalud";
+        public const string InsteekShoulderOutsideX = "X_Insteek buitenberm";
+        public const string InsteekShoulderOutsideY = "Y_Insteek buitenberm";
+        public const string InsteekShoulderOutsideZ = "Z_Insteek buitenberm";
+        public const string TopShoulderOutsideX = "X_Kruin buitenberm";
+        public const string TopShoulderOutsideY = "Y_Kruin buitenberm";
+        public const string TopShoulderOutsideZ = "Z_Kruin buitenberm";
+        public const string DikeToeAtRiverX = "X_Teen dijk buitenwaarts";
+        public const string DikeToeAtRiverY = "Y_Teen dijk buitenwaarts";
+        public const string DikeToeAtRiverZ = "Z_Teen dijk buitenwaarts";
+        public const string SurfaceLevelOutsideX = "X_Maaiveld buitenwaarts";
+        public const string SurfaceLevelOutsideY = "Y_Maaiveld buitenwaarts";
+        public const string SurfaceLevelOutsideZ = "Z_Maaiveld buitenwaarts";
+        public const string InsertRiverChannelX = "X_Insteek geul";
+        public const string InsertRiverChannelY = "Y_Insteek geul";
+        public const string InsertRiverChannelZ = "Z_Insteek geul";
+        public const string BottomRiverChannelX = "X_Teen geul";
+        public const string BottomRiverChannelY = "Y_Teen geul";
+        public const string BottomRiverChannelZ = "Z_Teen geul";
+    }
+}
Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CachedCsvReader.cs
===================================================================
diff -u
--- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CachedCsvReader.cs	(revision 0)
+++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CachedCsvReader.cs	(revision 3110)
@@ -0,0 +1,410 @@
+// LumenWorks.Framework.IO.CSV.CachedCsvReader
+// Copyright (c) 2005 Sébastien Lorion
+//
+// MIT license (http://en.wikipedia.org/wiki/MIT_License)
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+// of the Software, and to permit persons to whom the Software is furnished to do so,
+// subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in all
+// copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+// PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.IO; + +using LumenWorks.Framework.IO.Csv.Resources; + +namespace LumenWorks.Framework.IO.Csv +{ + /// + /// Represents a reader that provides fast, cached, dynamic access to CSV data. + /// + /// The number of records is limited to - 1. + public partial class CachedCsvReader + : CsvReader, IListSource + { + #region Fields + + /// + /// Contains the cached records. + /// + private List _records; + + /// + /// Contains the current record index (inside the cached records array). + /// + private long _currentRecordIndex; + + /// + /// Indicates if a new record is being read from the CSV stream. + /// + private bool _readingStream; + + /// + /// Contains the binding list linked to this reader. + /// + private CsvBindingList _bindingList; + + #endregion + + #region Constructors + + /// + /// Initializes a new instance of the CsvReader class. + /// + /// A pointing to the CSV file. + /// if field names are located on the first non commented line, otherwise, . + /// + /// is a . + /// + /// + /// Cannot read from . + /// + public CachedCsvReader(TextReader reader, bool hasHeaders) + : this(reader, hasHeaders, DefaultBufferSize) + { + } + + /// + /// Initializes a new instance of the CsvReader class. + /// + /// A pointing to the CSV file. + /// if field names are located on the first non commented line, otherwise, . + /// The buffer size in bytes. + /// + /// is a . + /// + /// + /// Cannot read from . + /// + public CachedCsvReader(TextReader reader, bool hasHeaders, int bufferSize) + : this(reader, hasHeaders, DefaultDelimiter, DefaultQuote, DefaultEscape, DefaultComment, ValueTrimmingOptions.UnquotedOnly, bufferSize) + { + } + + /// + /// Initializes a new instance of the CsvReader class. + /// + /// A pointing to the CSV file. + /// if field names are located on the first non commented line, otherwise, . + /// The delimiter character separating each field (default is ','). + /// + /// is a . + /// + /// + /// Cannot read from . + /// + public CachedCsvReader(TextReader reader, bool hasHeaders, char delimiter) + : this(reader, hasHeaders, delimiter, DefaultQuote, DefaultEscape, DefaultComment, ValueTrimmingOptions.UnquotedOnly, DefaultBufferSize) + { + } + + /// + /// Initializes a new instance of the CsvReader class. + /// + /// A pointing to the CSV file. + /// if field names are located on the first non commented line, otherwise, . + /// The delimiter character separating each field (default is ','). + /// The buffer size in bytes. + /// + /// is a . + /// + /// + /// Cannot read from . + /// + public CachedCsvReader(TextReader reader, bool hasHeaders, char delimiter, int bufferSize) + : this(reader, hasHeaders, delimiter, DefaultQuote, DefaultEscape, DefaultComment, ValueTrimmingOptions.UnquotedOnly, bufferSize) + { + } + + /// + /// Initializes a new instance of the CsvReader class. + /// + /// A pointing to the CSV file. + /// if field names are located on the first non commented line, otherwise, . + /// The delimiter character separating each field (default is ','). + /// The quotation character wrapping every field (default is '''). 
+ /// + /// The escape character letting insert quotation characters inside a quoted field (default is '\'). + /// If no escape character, set to '\0' to gain some performance. + /// + /// The comment character indicating that a line is commented out (default is '#'). + /// Determines how values should be trimmed. + /// + /// is a . + /// + /// + /// Cannot read from . + /// + public CachedCsvReader(TextReader reader, bool hasHeaders, char delimiter, char quote, char escape, char comment, ValueTrimmingOptions trimmingOptions) + : this(reader, hasHeaders, delimiter, quote, escape, comment, trimmingOptions, DefaultBufferSize) + { + } + + /// + /// Initializes a new instance of the CsvReader class. + /// + /// A pointing to the CSV file. + /// if field names are located on the first non commented line, otherwise, . + /// The delimiter character separating each field (default is ','). + /// The quotation character wrapping every field (default is '''). + /// + /// The escape character letting insert quotation characters inside a quoted field (default is '\'). + /// If no escape character, set to '\0' to gain some performance. + /// + /// The comment character indicating that a line is commented out (default is '#'). + /// if spaces at the start and end of a field are trimmed, otherwise, . Default is . + /// The buffer size in bytes. + /// + /// is a . + /// + /// + /// must be 1 or more. + /// + public CachedCsvReader(TextReader reader, bool hasHeaders, char delimiter, char quote, char escape, char comment, ValueTrimmingOptions trimmingOptions, int bufferSize) + : base(reader, hasHeaders, delimiter, quote, escape, comment, trimmingOptions, bufferSize) + { + _records = new List(); + _currentRecordIndex = -1; + } + + #endregion + + #region Properties + + #region State + + /// + /// Gets the current record index in the CSV file. + /// + /// The current record index in the CSV file. + public override long CurrentRecordIndex + { + get + { + return _currentRecordIndex; + } + } + + /// + /// Gets a value that indicates whether the current stream position is at the end of the stream. + /// + /// if the current stream position is at the end of the stream; otherwise . + public override bool EndOfStream + { + get + { + if (_currentRecordIndex < base.CurrentRecordIndex) + return false; + else + return base.EndOfStream; + } + } + + #endregion + + #endregion + + #region Indexers + + /// + /// Gets the field at the specified index. + /// + /// The field at the specified index. + /// + /// must be included in [0, [. + /// + /// + /// No record read yet. Call ReadLine() first. + /// + /// + /// The CSV data appears to be missing a field. + /// + /// + /// The CSV appears to be corrupt at the current position. + /// + /// + /// The instance has been disposed of. + /// + public override String this[int field] + { + get + { + if (_readingStream) + return base[field]; + else if (_currentRecordIndex > -1) + { + if (field > -1 && field < this.FieldCount) + return _records[(int) _currentRecordIndex][field]; + else + throw new ArgumentOutOfRangeException("field", field, string.Format(CultureInfo.InvariantCulture, ExceptionMessage.FieldIndexOutOfRange, field)); + } + else + throw new InvalidOperationException(ExceptionMessage.NoCurrentRecord); + } + } + + #endregion + + #region Methods + + #region Read + + /// + /// Reads the CSV stream from the current position to the end of the stream. + /// + /// + /// The instance has been disposed of. 
+ /// + public virtual void ReadToEnd() + { + _currentRecordIndex = base.CurrentRecordIndex; + + while (ReadNextRecord()) ; + } + + /// + /// Reads the next record. + /// + /// + /// Indicates if the reader will proceed to the next record after having read headers. + /// if it stops after having read headers; otherwise, . + /// + /// + /// Indicates if the reader will skip directly to the next line without parsing the current one. + /// To be used when an error occurs. + /// + /// if a record has been successfully reads; otherwise, . + /// + /// The instance has been disposed of. + /// + protected override bool ReadNextRecord(bool onlyReadHeaders, bool skipToNextLine) + { + if (_currentRecordIndex < base.CurrentRecordIndex) + { + _currentRecordIndex++; + return true; + } + else + { + _readingStream = true; + + try + { + bool canRead = base.ReadNextRecord(onlyReadHeaders, skipToNextLine); + + if (canRead) + { + string[] record = new string[this.FieldCount]; + + if (base.CurrentRecordIndex > -1) + { + CopyCurrentRecordTo(record); + _records.Add(record); + } + else + { + if (MoveTo(0)) + CopyCurrentRecordTo(record); + + MoveTo(-1); + } + + if (!onlyReadHeaders) + _currentRecordIndex++; + } + else + { + // No more records to read, so set array size to only what is needed + _records.Capacity = _records.Count; + } + + return canRead; + } + finally + { + _readingStream = false; + } + } + } + + #endregion + + #region Move + + /// + /// Moves before the first record. + /// + public void MoveToStart() + { + _currentRecordIndex = -1; + } + + /// + /// Moves to the last record read so far. + /// + public void MoveToLastCachedRecord() + { + _currentRecordIndex = base.CurrentRecordIndex; + } + + /// + /// Moves to the specified record index. + /// + /// The record index. + /// true if the operation was successful; otherwise, false. + /// + /// The instance has been disposed of. + /// + public override bool MoveTo(long record) + { + if (record < -1) + record = -1; + + if (record <= base.CurrentRecordIndex) + { + _currentRecordIndex = record; + return true; + } + else + { + _currentRecordIndex = base.CurrentRecordIndex; + return base.MoveTo(record); + } + } + + #endregion + + #endregion + + #region IListSource Members + + bool IListSource.ContainsListCollection + { + get { return false; } + } + + System.Collections.IList IListSource.GetList() + { + if (_bindingList == null) + _bindingList = new CsvBindingList(this); + + return _bindingList; + } + + #endregion + } +} Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/Resources/ExceptionMessage.resx =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/Resources/ExceptionMessage.resx (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/Resources/ExceptionMessage.resx (revision 3110) @@ -0,0 +1,175 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + text/microsoft-resx + + + 2.0 + + + System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + Buffer size must be 1 or more. + + + Cannot move to a previous record in forward-only mode. + + + Cannot read record at index '{0}'. + index:int + + + Enumeration has either not started or has already finished. 
+ + + Collection was modified; enumeration operation may not execute. + + + '{0}' field header not found. + header:string + + + Field index must be included in [0, FieldCount[. Specified field index was : '{0}'. + index:int + + + The CSV appears to be corrupt near record '{0}' field '{1} at position '{2}'. Current raw data : '{3}'. + currentRecordIndex:int;currentFieldIndex:int;currentPosition:int;rawData:string + + + '{0}' is not a supported missing field action. + missingFieldAction:LumenWorks.Framework.IO.Csv.MissingFieldAction + + + No current record. + + + The CSV does not have headers (CsvReader.HasHeaders property is false). + + + The number of fields in the record is greater than the available space from index to the end of the destination array. + + + '{0}' is not a valid ParseErrorAction while inside a ParseError event. + parseErrorAction:LumenWorks.Framework.IO.Csv.ParseErrorAction + + + '{0}' is not a supported ParseErrorAction. + parseErrorAction:LumenWorks.Framework.IO.Csv.ParseErrorAction + + + This operation is invalid when the reader is closed. + + + Record index must be 0 or more. + + \ No newline at end of file Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CsvReader.RecordEnumerator.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CsvReader.RecordEnumerator.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CsvReader.RecordEnumerator.cs (revision 3110) @@ -0,0 +1,164 @@ +// LumenWorks.Framework.IO.CSV.CsvReader.RecordEnumerator +// Copyright (c) 2005 S�bastien Lorion +// +// MIT license (http://en.wikipedia.org/wiki/MIT_License) +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do so, +// subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +using System; +using System.Collections; +using System.Collections.Generic; + +using LumenWorks.Framework.IO.Csv.Resources; + +namespace LumenWorks.Framework.IO.Csv +{ + public partial class CsvReader + { + /// + /// Supports a simple iteration over the records of a . + /// + public struct RecordEnumerator + : IEnumerator, IEnumerator + { + #region Fields + + /// + /// Contains the enumerated . + /// + private CsvReader _reader; + + /// + /// Contains the current record. + /// + private string[] _current; + + /// + /// Contains the current record index. + /// + private long _currentRecordIndex; + + #endregion + + #region Constructors + + /// + /// Initializes a new instance of the class. 
+ /// + /// The to iterate over. + /// + /// is a . + /// + public RecordEnumerator(CsvReader reader) + { + if (reader == null) + throw new ArgumentNullException("reader"); + + _reader = reader; + _current = null; + + _currentRecordIndex = reader._currentRecordIndex; + } + + #endregion + + #region IEnumerator Members + + /// + /// Gets the current record. + /// + public string[] Current + { + get { return _current; } + } + + /// + /// Advances the enumerator to the next record of the CSV. + /// + /// if the enumerator was successfully advanced to the next record, if the enumerator has passed the end of the CSV. + public bool MoveNext() + { + if (_reader._currentRecordIndex != _currentRecordIndex) + throw new InvalidOperationException(ExceptionMessage.EnumerationVersionCheckFailed); + + if (_reader.ReadNextRecord()) + { + _current = new string[_reader._fieldCount]; + + _reader.CopyCurrentRecordTo(_current); + _currentRecordIndex = _reader._currentRecordIndex; + + return true; + } + else + { + _current = null; + _currentRecordIndex = _reader._currentRecordIndex; + + return false; + } + } + + #endregion + + #region IEnumerator Members + + /// + /// Sets the enumerator to its initial position, which is before the first record in the CSV. + /// + public void Reset() + { + if (_reader._currentRecordIndex != _currentRecordIndex) + throw new InvalidOperationException(ExceptionMessage.EnumerationVersionCheckFailed); + + _reader.MoveTo(-1); + + _current = null; + _currentRecordIndex = _reader._currentRecordIndex; + } + + /// + /// Gets the current record. + /// + object IEnumerator.Current + { + get + { + if (_reader._currentRecordIndex != _currentRecordIndex) + throw new InvalidOperationException(ExceptionMessage.EnumerationVersionCheckFailed); + + return this.Current; + } + } + + #endregion + + #region IDisposable Members + + /// + /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + /// + public void Dispose() + { + _reader = null; + _current = null; + } + + #endregion + } + } +} \ No newline at end of file Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CachedCsvReader.CsvRecordComparer.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CachedCsvReader.CsvRecordComparer.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CachedCsvReader.CsvRecordComparer.cs (revision 3110) @@ -0,0 +1,85 @@ +// LumenWorks.Framework.IO.CSV.CachedCsvReader.CsvRecordComparer +// Copyright (c) 2006 S�bastien Lorion +// +// MIT license (http://en.wikipedia.org/wiki/MIT_License) +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do so, +// subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +// PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using Debug = System.Diagnostics.Debug; +using System.Globalization; + +namespace LumenWorks.Framework.IO.Csv +{ + public partial class CachedCsvReader + : CsvReader + { + /// + /// Represents a CSV record comparer. + /// + private class CsvRecordComparer + : IComparer + { + #region Fields + + /// + /// Contains the field index of the values to compare. + /// + private int _field; + + /// + /// Contains the sort direction. + /// + private ListSortDirection _direction; + + #endregion + + #region Constructors + + /// + /// Initializes a new instance of the CsvRecordComparer class. + /// + /// The field index of the values to compare. + /// The sort direction. + public CsvRecordComparer(int field, ListSortDirection direction) + { + if (field < 0) + throw new ArgumentOutOfRangeException("field", field, string.Format(CultureInfo.InvariantCulture, Resources.ExceptionMessage.FieldIndexOutOfRange, field)); + + _field = field; + _direction = direction; + } + + #endregion + + #region IComparer Members + + public int Compare(string[] x, string[] y) + { + Debug.Assert(x != null && y != null && x.Length == y.Length && _field < x.Length); + + int result = String.Compare(x[_field], y[_field], StringComparison.CurrentCulture); + + return (_direction == ListSortDirection.Ascending ? result : -result); + } + + #endregion + } + } +} \ No newline at end of file Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CsvReader.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CsvReader.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CsvReader.cs (revision 3110) @@ -0,0 +1,2470 @@ +// LumenWorks.Framework.IO.CSV.CsvReader +// Copyright (c) 2005 S�bastien Lorion +// +// MIT license (http://en.wikipedia.org/wiki/MIT_License) +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do so, +// subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Data; +using System.Data.Common; +using Debug = System.Diagnostics.Debug; +using System.Globalization; +using System.IO; + +using LumenWorks.Framework.IO.Csv.Resources; + +namespace LumenWorks.Framework.IO.Csv +{ + /// + /// Represents a reader that provides fast, non-cached, forward-only access to CSV data. + /// + public partial class CsvReader + : IDataReader, IEnumerable, IDisposable + { + #region Constants + + /// + /// Defines the default buffer size. + /// + public const int DefaultBufferSize = 0x1000; + + /// + /// Defines the default delimiter character separating each field. + /// + public const char DefaultDelimiter = ';'; + + /// + /// Defines the default quote character wrapping every field. + /// + public const char DefaultQuote = '"'; + + /// + /// Defines the default escape character letting insert quotation characters inside a quoted field. + /// + public const char DefaultEscape = '"'; + + /// + /// Defines the default comment character indicating that a line is commented out. + /// + public const char DefaultComment = '#'; + + #endregion + + #region Fields + + /// + /// Contains the field header comparer. + /// + private static readonly StringComparer _fieldHeaderComparer = StringComparer.CurrentCultureIgnoreCase; + + #region Settings + + /// + /// Contains the pointing to the CSV file. + /// + private TextReader _reader; + + /// + /// Contains the buffer size. + /// + private int _bufferSize; + + /// + /// Contains the comment character indicating that a line is commented out. + /// + private char _comment; + + /// + /// Contains the escape character letting insert quotation characters inside a quoted field. + /// + private char _escape; + + /// + /// Contains the delimiter character separating each field. + /// + private char _delimiter; + + /// + /// Contains the quotation character wrapping every field. + /// + private char _quote; + + /// + /// Determines which values should be trimmed. + /// + private ValueTrimmingOptions _trimmingOptions; + + /// + /// Indicates if field names are located on the first non commented line. + /// + private bool _hasHeaders; + + /// + /// Contains the default action to take when a parsing error has occured. + /// + private ParseErrorAction _defaultParseErrorAction; + + /// + /// Contains the action to take when a field is missing. + /// + private MissingFieldAction _missingFieldAction; + + /// + /// Indicates if the reader supports multiline. + /// + private bool _supportsMultiline; + + /// + /// Indicates if the reader will skip empty lines. + /// + private bool _skipEmptyLines; + + #endregion + + #region State + + /// + /// Indicates if the class is initialized. + /// + private bool _initialized; + + /// + /// Contains the field headers. + /// + private string[] _fieldHeaders; + + /// + /// Contains the dictionary of field indexes by header. The key is the field name and the value is its index. + /// + private Dictionary _fieldHeaderIndexes; + + /// + /// Contains the current record index in the CSV file. + /// A value of means that the reader has not been initialized yet. + /// Otherwise, a negative value means that no record has been read yet. + /// + private long _currentRecordIndex; + + /// + /// Contains the starting position of the next unread field. + /// + private int _nextFieldStart; + + /// + /// Contains the index of the next unread field. 
+ /// + private int _nextFieldIndex; + + /// + /// Contains the array of the field values for the current record. + /// A null value indicates that the field have not been parsed. + /// + private string[] _fields; + + /// + /// Contains the maximum number of fields to retrieve for each record. + /// + private int _fieldCount; + + /// + /// Contains the read buffer. + /// + private char[] _buffer; + + /// + /// Contains the current read buffer length. + /// + private int _bufferLength; + + /// + /// Indicates if the end of the reader has been reached. + /// + private bool _eof; + + /// + /// Indicates if the last read operation reached an EOL character. + /// + private bool _eol; + + /// + /// Indicates if the first record is in cache. + /// This can happen when initializing a reader with no headers + /// because one record must be read to get the field count automatically + /// + private bool _firstRecordInCache; + + /// + /// Indicates if one or more field are missing for the current record. + /// Resets after each successful record read. + /// + private bool _missingFieldFlag; + + /// + /// Indicates if a parse error occured for the current record. + /// Resets after each successful record read. + /// + private bool _parseErrorFlag; + + #endregion + + #endregion + + #region Constructors + + /// + /// Initializes a new instance of the CsvReader class. + /// + /// A pointing to the CSV file. + /// if field names are located on the first non commented line, otherwise, . + /// + /// is a . + /// + /// + /// Cannot read from . + /// + public CsvReader(TextReader reader, bool hasHeaders) + : this(reader, hasHeaders, DefaultDelimiter, DefaultQuote, DefaultEscape, DefaultComment, ValueTrimmingOptions.UnquotedOnly, DefaultBufferSize) + { + } + + /// + /// Initializes a new instance of the CsvReader class. + /// + /// A pointing to the CSV file. + /// if field names are located on the first non commented line, otherwise, . + /// The buffer size in bytes. + /// + /// is a . + /// + /// + /// Cannot read from . + /// + public CsvReader(TextReader reader, bool hasHeaders, int bufferSize) + : this(reader, hasHeaders, DefaultDelimiter, DefaultQuote, DefaultEscape, DefaultComment, ValueTrimmingOptions.UnquotedOnly, bufferSize) + { + } + + /// + /// Initializes a new instance of the CsvReader class. + /// + /// A pointing to the CSV file. + /// if field names are located on the first non commented line, otherwise, . + /// The delimiter character separating each field (default is ','). + /// + /// is a . + /// + /// + /// Cannot read from . + /// + public CsvReader(TextReader reader, bool hasHeaders, char delimiter) + : this(reader, hasHeaders, delimiter, DefaultQuote, DefaultEscape, DefaultComment, ValueTrimmingOptions.UnquotedOnly, DefaultBufferSize) + { + } + + /// + /// Initializes a new instance of the CsvReader class. + /// + /// A pointing to the CSV file. + /// if field names are located on the first non commented line, otherwise, . + /// The delimiter character separating each field (default is ','). + /// The buffer size in bytes. + /// + /// is a . + /// + /// + /// Cannot read from . + /// + public CsvReader(TextReader reader, bool hasHeaders, char delimiter, int bufferSize) + : this(reader, hasHeaders, delimiter, DefaultQuote, DefaultEscape, DefaultComment, ValueTrimmingOptions.UnquotedOnly, bufferSize) + { + } + + /// + /// Initializes a new instance of the CsvReader class. + /// + /// A pointing to the CSV file. + /// if field names are located on the first non commented line, otherwise, . 
+ /// The delimiter character separating each field (default is ','). + /// The quotation character wrapping every field (default is '''). + /// + /// The escape character letting insert quotation characters inside a quoted field (default is '\'). + /// If no escape character, set to '\0' to gain some performance. + /// + /// The comment character indicating that a line is commented out (default is '#'). + /// Determines which values should be trimmed. + /// + /// is a . + /// + /// + /// Cannot read from . + /// + public CsvReader(TextReader reader, bool hasHeaders, char delimiter, char quote, char escape, char comment, ValueTrimmingOptions trimmingOptions) + : this(reader, hasHeaders, delimiter, quote, escape, comment, trimmingOptions, DefaultBufferSize) + { + } + + /// + /// Initializes a new instance of the CsvReader class. + /// + /// A pointing to the CSV file. + /// if field names are located on the first non commented line, otherwise, . + /// The delimiter character separating each field (default is ','). + /// The quotation character wrapping every field (default is '''). + /// + /// The escape character letting insert quotation characters inside a quoted field (default is '\'). + /// If no escape character, set to '\0' to gain some performance. + /// + /// The comment character indicating that a line is commented out (default is '#'). + /// Determines which values should be trimmed. + /// The buffer size in bytes. + /// + /// is a . + /// + /// + /// must be 1 or more. + /// + public CsvReader(TextReader reader, bool hasHeaders, char delimiter, char quote, char escape, char comment, ValueTrimmingOptions trimmingOptions, int bufferSize) + { +#if DEBUG + _allocStack = new System.Diagnostics.StackTrace(); +#endif + + if (reader == null) + throw new ArgumentNullException("reader"); + + if (bufferSize <= 0) + throw new ArgumentOutOfRangeException("bufferSize", bufferSize, ExceptionMessage.BufferSizeTooSmall); + + _bufferSize = bufferSize; + + if (reader is StreamReader) + { + Stream stream = ((StreamReader)reader).BaseStream; + + if (stream.CanSeek) + { + // Handle bad implementations returning 0 or less + if (stream.Length > 0) + _bufferSize = (int)Math.Min(bufferSize, stream.Length); + } + } + + _reader = reader; + _delimiter = delimiter; + _quote = quote; + _escape = escape; + _comment = comment; + + _hasHeaders = hasHeaders; + _trimmingOptions = trimmingOptions; + _supportsMultiline = true; + _skipEmptyLines = true; + this.DefaultHeaderName = "Column"; + + _currentRecordIndex = -1; + _defaultParseErrorAction = ParseErrorAction.RaiseEvent; + } + + #endregion + + #region Events + + /// + /// Occurs when there is an error while parsing the CSV stream. + /// + public event EventHandler ParseError; + + /// + /// Raises the event. + /// + /// The that contains the event data. + protected virtual void OnParseError(ParseErrorEventArgs e) + { + EventHandler handler = ParseError; + + if (handler != null) + handler(this, e); + } + + #endregion + + #region Properties + + #region Settings + + /// + /// Gets the comment character indicating that a line is commented out. + /// + /// The comment character indicating that a line is commented out. + public char Comment + { + get + { + return _comment; + } + } + + /// + /// Gets the escape character letting insert quotation characters inside a quoted field. + /// + /// The escape character letting insert quotation characters inside a quoted field. 
+ public char Escape + { + get + { + return _escape; + } + } + + /// + /// Gets the delimiter character separating each field. + /// + /// The delimiter character separating each field. + public char Delimiter + { + get + { + return _delimiter; + } + } + + /// + /// Gets the quotation character wrapping every field. + /// + /// The quotation character wrapping every field. + public char Quote + { + get + { + return _quote; + } + } + + /// + /// Indicates if field names are located on the first non commented line. + /// + /// if field names are located on the first non commented line, otherwise, . + public bool HasHeaders + { + get + { + return _hasHeaders; + } + } + + /// + /// Indicates if spaces at the start and end of a field are trimmed. + /// + /// if spaces at the start and end of a field are trimmed, otherwise, . + public ValueTrimmingOptions TrimmingOption + { + get + { + return _trimmingOptions; + } + } + + /// + /// Gets the buffer size. + /// + public int BufferSize + { + get + { + return _bufferSize; + } + } + + /// + /// Gets or sets the default action to take when a parsing error has occured. + /// + /// The default action to take when a parsing error has occured. + public ParseErrorAction DefaultParseErrorAction + { + get + { + return _defaultParseErrorAction; + } + set + { + _defaultParseErrorAction = value; + } + } + + /// + /// Gets or sets the action to take when a field is missing. + /// + /// The action to take when a field is missing. + public MissingFieldAction MissingFieldAction + { + get + { + return _missingFieldAction; + } + set + { + _missingFieldAction = value; + } + } + + /// + /// Gets or sets a value indicating if the reader supports multiline fields. + /// + /// A value indicating if the reader supports multiline field. + public bool SupportsMultiline + { + get + { + return _supportsMultiline; + } + set + { + _supportsMultiline = value; + } + } + + /// + /// Gets or sets a value indicating if the reader will skip empty lines. + /// + /// A value indicating if the reader will skip empty lines. + public bool SkipEmptyLines + { + get + { + return _skipEmptyLines; + } + set + { + _skipEmptyLines = value; + } + } + + /// + /// Gets or sets the default header name when it is an empty string or only whitespaces. + /// The header index will be appended to the specified name. + /// + /// The default header name when it is an empty string or only whitespaces. + public string DefaultHeaderName { get; set; } + + #endregion + + #region State + + /// + /// Gets the maximum number of fields to retrieve for each record. + /// + /// The maximum number of fields to retrieve for each record. + /// + /// The instance has been disposed of. + /// + public int FieldCount + { + get + { + EnsureInitialize(); + return _fieldCount; + } + } + + /// + /// Gets a value that indicates whether the current stream position is at the end of the stream. + /// + /// if the current stream position is at the end of the stream; otherwise . + public virtual bool EndOfStream + { + get + { + return _eof; + } + } + + /// + /// Gets the field headers. + /// + /// The field headers or an empty array if headers are not supported. + /// + /// The instance has been disposed of. 
+ /// + public string[] GetFieldHeaders() + { + EnsureInitialize(); + Debug.Assert(_fieldHeaders != null, "Field headers must be non null."); + + string[] fieldHeaders = new string[_fieldHeaders.Length]; + + for (int i = 0; i < fieldHeaders.Length; i++) + fieldHeaders[i] = _fieldHeaders[i]; + + return fieldHeaders; + } + + /// + /// Gets the current record index in the CSV file. + /// + /// The current record index in the CSV file. + public virtual long CurrentRecordIndex + { + get + { + return _currentRecordIndex; + } + } + + /// + /// Indicates if one or more field are missing for the current record. + /// Resets after each successful record read. + /// + public bool MissingFieldFlag + { + get { return _missingFieldFlag; } + } + + /// + /// Indicates if a parse error occured for the current record. + /// Resets after each successful record read. + /// + public bool ParseErrorFlag + { + get { return _parseErrorFlag; } + } + + #endregion + + #endregion + + #region Indexers + + /// + /// Gets the field with the specified name and record position. must be . + /// + /// + /// The field with the specified name and record position. + /// + /// + /// is or an empty string. + /// + /// + /// The CSV does not have headers ( property is ). + /// + /// + /// not found. + /// + /// + /// Record index must be > 0. + /// + /// + /// Cannot move to a previous record in forward-only mode. + /// + /// + /// Cannot read record at . + /// + /// + /// The CSV appears to be corrupt at the current position. + /// + /// + /// The instance has been disposed of. + /// + public string this[int record, string field] + { + get + { + if (!MoveTo(record)) + throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, ExceptionMessage.CannotReadRecordAtIndex, record)); + + return this[field]; + } + } + + /// + /// Gets the field at the specified index and record position. + /// + /// + /// The field at the specified index and record position. + /// A is returned if the field cannot be found for the record. + /// + /// + /// must be included in [0, [. + /// + /// + /// Record index must be > 0. + /// + /// + /// Cannot move to a previous record in forward-only mode. + /// + /// + /// Cannot read record at . + /// + /// + /// The CSV appears to be corrupt at the current position. + /// + /// + /// The instance has been disposed of. + /// + public string this[int record, int field] + { + get + { + if (!MoveTo(record)) + throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, ExceptionMessage.CannotReadRecordAtIndex, record)); + + return this[field]; + } + } + + /// + /// Gets the field with the specified name. must be . + /// + /// + /// The field with the specified name. + /// + /// + /// is or an empty string. + /// + /// + /// The CSV does not have headers ( property is ). + /// + /// + /// not found. + /// + /// + /// The CSV appears to be corrupt at the current position. + /// + /// + /// The instance has been disposed of. + /// + public string this[string field] + { + get + { + if (string.IsNullOrEmpty(field)) + throw new ArgumentNullException("field"); + + if (!_hasHeaders) + throw new InvalidOperationException(ExceptionMessage.NoHeaders); + + int index = GetFieldIndex(field); + + if (index < 0) + throw new ArgumentException(string.Format(CultureInfo.InvariantCulture, ExceptionMessage.FieldHeaderNotFound, field), "field"); + + return this[index]; + } + } + + /// + /// Gets the field at the specified index. + /// + /// The field at the specified index. 
+ /// + /// must be included in [0, [. + /// + /// + /// No record read yet. Call ReadLine() first. + /// + /// + /// The CSV appears to be corrupt at the current position. + /// + /// + /// The instance has been disposed of. + /// + public virtual string this[int field] + { + get + { + return ReadField(field, false, false); + } + } + + #endregion + + #region Methods + + #region EnsureInitialize + + /// + /// Ensures that the reader is initialized. + /// + private void EnsureInitialize() + { + if (!_initialized) + this.ReadNextRecord(true, false); + + Debug.Assert(_fieldHeaders != null); + Debug.Assert(_fieldHeaders.Length > 0 || (_fieldHeaders.Length == 0 && _fieldHeaderIndexes == null)); + } + + #endregion + + #region GetFieldIndex + + /// + /// Gets the field index for the provided header. + /// + /// The header to look for. + /// The field index for the provided header. -1 if not found. + /// + /// The instance has been disposed of. + /// + public int GetFieldIndex(string header) + { + EnsureInitialize(); + + int index; + + if (_fieldHeaderIndexes != null && _fieldHeaderIndexes.TryGetValue(header, out index)) + return index; + else + return -1; + } + + #endregion + + #region CopyCurrentRecordTo + + /// + /// Copies the field array of the current record to a one-dimensional array, starting at the beginning of the target array. + /// + /// The one-dimensional that is the destination of the fields of the current record. + /// + /// is . + /// + /// + /// The number of fields in the record is greater than the available space from to the end of . + /// + public void CopyCurrentRecordTo(string[] array) + { + CopyCurrentRecordTo(array, 0); + } + + /// + /// Copies the field array of the current record to a one-dimensional array, starting at the beginning of the target array. + /// + /// The one-dimensional that is the destination of the fields of the current record. + /// The zero-based index in at which copying begins. + /// + /// is . + /// + /// + /// is les than zero or is equal to or greater than the length . + /// + /// + /// No current record. + /// + /// + /// The number of fields in the record is greater than the available space from to the end of . + /// + public void CopyCurrentRecordTo(string[] array, int index) + { + if (array == null) + throw new ArgumentNullException("array"); + + if (index < 0 || index >= array.Length) + throw new ArgumentOutOfRangeException("index", index, string.Empty); + + if (_currentRecordIndex < 0 || !_initialized) + throw new InvalidOperationException(ExceptionMessage.NoCurrentRecord); + + if (array.Length - index < _fieldCount) + throw new ArgumentException(ExceptionMessage.NotEnoughSpaceInArray, "array"); + + for (int i = 0; i < _fieldCount; i++) + { + if (_parseErrorFlag) + array[index + i] = null; + else + array[index + i] = this[i]; + } + } + + #endregion + + #region GetCurrentRawData + + /// + /// Gets the current raw CSV data. + /// + /// Used for exception handling purpose. + /// The current raw CSV data. + public string GetCurrentRawData() + { + if (_buffer != null && _bufferLength > 0) + return new string(_buffer, 0, _bufferLength); + else + return string.Empty; + } + + #endregion + + #region IsWhiteSpace + + /// + /// Indicates whether the specified Unicode character is categorized as white space. + /// + /// A Unicode character. + /// if is white space; otherwise, . + private bool IsWhiteSpace(char c) + { + // Handle cases where the delimiter is a whitespace (e.g. 
tab) + if (c == _delimiter) + return false; + else + { + // See char.IsLatin1(char c) in Reflector + if (c <= '\x00ff') + return (c == ' ' || c == '\t'); + else + return (System.Globalization.CharUnicodeInfo.GetUnicodeCategory(c) == System.Globalization.UnicodeCategory.SpaceSeparator); + } + } + + #endregion + + #region MoveTo + + /// + /// Moves to the specified record index. + /// + /// The record index. + /// true if the operation was successful; otherwise, false. + /// + /// The instance has been disposed of. + /// + public virtual bool MoveTo(long record) + { + if (record < _currentRecordIndex) + return false; + + // Get number of record to read + long offset = record - _currentRecordIndex; + + while (offset > 0) + { + if (!ReadNextRecord()) + return false; + + offset--; + } + + return true; + } + + #endregion + + #region ParseNewLine + + /// + /// Parses a new line delimiter. + /// + /// The starting position of the parsing. Will contain the resulting end position. + /// if a new line delimiter was found; otherwise, . + /// + /// The instance has been disposed of. + /// + private bool ParseNewLine(ref int pos) + { + Debug.Assert(pos <= _bufferLength); + + // Check if already at the end of the buffer + if (pos == _bufferLength) + { + pos = 0; + + if (!ReadBuffer()) + return false; + } + + char c = _buffer[pos]; + + // Treat \r as new line only if it's not the delimiter + + if (c == '\r' && _delimiter != '\r') + { + pos++; + + // Skip following \n (if there is one) + + if (pos < _bufferLength) + { + if (_buffer[pos] == '\n') + pos++; + } + else + { + if (ReadBuffer()) + { + if (_buffer[0] == '\n') + pos = 1; + else + pos = 0; + } + } + + if (pos >= _bufferLength) + { + ReadBuffer(); + pos = 0; + } + + return true; + } + else if (c == '\n') + { + pos++; + + if (pos >= _bufferLength) + { + ReadBuffer(); + pos = 0; + } + + return true; + } + + return false; + } + + /// + /// Determines whether the character at the specified position is a new line delimiter. + /// + /// The position of the character to verify. + /// + /// if the character at the specified position is a new line delimiter; otherwise, . + /// + private bool IsNewLine(int pos) + { + Debug.Assert(pos < _bufferLength); + + char c = _buffer[pos]; + + if (c == '\n') + return true; + else if (c == '\r' && _delimiter != '\r') + return true; + else + return false; + } + + #endregion + + #region ReadBuffer + + /// + /// Fills the buffer with data from the reader. + /// + /// if data was successfully read; otherwise, . + /// + /// The instance has been disposed of. + /// + private bool ReadBuffer() + { + if (_eof) + return false; + + CheckDisposed(); + + _bufferLength = _reader.Read(_buffer, 0, _bufferSize); + + if (_bufferLength > 0) + return true; + else + { + _eof = true; + _buffer = null; + + return false; + } + } + + #endregion + + #region ReadField + + /// + /// Reads the field at the specified index. + /// Any unread fields with an inferior index will also be read as part of the required parsing. + /// + /// The field index. + /// Indicates if the reader is currently initializing. + /// Indicates if the value(s) are discarded. + /// + /// The field at the specified index. + /// A indicates that an error occured or that the last field has been reached during initialization. + /// + /// + /// is out of range. + /// + /// + /// There is no current record. + /// + /// + /// The CSV data appears to be missing a field. + /// + /// + /// The CSV data appears to be malformed. + /// + /// + /// The instance has been disposed of. 
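A sketch of the forward-only contract of MoveTo shown above; the record indices are illustrative.

    bool reached = csv.MoveTo(10);   // reads and discards records until index 10 is current
    bool back    = csv.MoveTo(3);    // false once CurrentRecordIndex > 3: the reader cannot rewind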
+ /// + private string ReadField(int field, bool initializing, bool discardValue) + { + if (!initializing) + { + if (field < 0 || field >= _fieldCount) + throw new ArgumentOutOfRangeException("field", field, string.Format(CultureInfo.InvariantCulture, ExceptionMessage.FieldIndexOutOfRange, field)); + + if (_currentRecordIndex < 0) + throw new InvalidOperationException(ExceptionMessage.NoCurrentRecord); + + // Directly return field if cached + if (_fields[field] != null) + return _fields[field]; + else if (_missingFieldFlag) + return HandleMissingField(null, field, ref _nextFieldStart); + } + + CheckDisposed(); + + int index = _nextFieldIndex; + + while (index < field + 1) + { + // Handle case where stated start of field is past buffer + // This can occur because _nextFieldStart is simply 1 + last char position of previous field + if (_nextFieldStart == _bufferLength) + { + _nextFieldStart = 0; + + // Possible EOF will be handled later (see Handle_EOF1) + ReadBuffer(); + } + + string value = null; + + if (_missingFieldFlag) + { + value = HandleMissingField(value, index, ref _nextFieldStart); + } + else if (_nextFieldStart == _bufferLength) + { + // Handle_EOF1: Handle EOF here + + // If current field is the requested field, then the value of the field is "" as in "f1,f2,f3,(\s*)" + // otherwise, the CSV is malformed + + if (index == field) + { + if (!discardValue) + { + value = string.Empty; + _fields[index] = value; + } + + _missingFieldFlag = true; + } + else + { + value = HandleMissingField(value, index, ref _nextFieldStart); + } + } + else + { + // Trim spaces at start + if ((_trimmingOptions & ValueTrimmingOptions.UnquotedOnly) != 0) + SkipWhiteSpaces(ref _nextFieldStart); + + if (_eof) + { + value = string.Empty; + _fields[field] = value; + } + else if (_buffer[_nextFieldStart] != _quote) + { + // Non-quoted field + + int start = _nextFieldStart; + int pos = _nextFieldStart; + + for (; ; ) + { + while (pos < _bufferLength) + { + char c = _buffer[pos]; + + if (c == _delimiter) + { + _nextFieldStart = pos + 1; + + break; + } + else if (c == '\r' || c == '\n') + { + _nextFieldStart = pos; + _eol = true; + + break; + } + else + pos++; + } + + if (pos < _bufferLength) + break; + else + { + if (!discardValue) + value += new string(_buffer, start, pos - start); + + start = 0; + pos = 0; + _nextFieldStart = 0; + + if (!ReadBuffer()) + break; + } + } + + if (!discardValue) + { + if ((_trimmingOptions & ValueTrimmingOptions.UnquotedOnly) == 0) + { + if (!_eof && pos > start) + value += new string(_buffer, start, pos - start); + } + else + { + if (!_eof && pos > start) + { + // Do the trimming + pos--; + while (pos > -1 && IsWhiteSpace(_buffer[pos])) + pos--; + pos++; + + if (pos > 0) + value += new string(_buffer, start, pos - start); + } + else + pos = -1; + + // If pos <= 0, that means the trimming went past buffer start, + // and the concatenated value needs to be trimmed too. + if (pos <= 0) + { + pos = (value == null ? 
-1 : value.Length - 1); + + // Do the trimming + while (pos > -1 && IsWhiteSpace(value[pos])) + pos--; + + pos++; + + if (pos > 0 && pos != value.Length) + value = value.Substring(0, pos); + } + } + + if (value == null) + value = string.Empty; + } + + if (_eol || _eof) + { + _eol = ParseNewLine(ref _nextFieldStart); + + // Reaching a new line is ok as long as the parser is initializing or it is the last field + if (!initializing && index != _fieldCount - 1) + { + if (value != null && value.Length == 0) + value = null; + + value = HandleMissingField(value, index, ref _nextFieldStart); + } + } + + if (!discardValue) + _fields[index] = value; + } + else + { + // Quoted field + + // Skip quote + int start = _nextFieldStart + 1; + int pos = start; + + bool quoted = true; + bool escaped = false; + + if ((_trimmingOptions & ValueTrimmingOptions.QuotedOnly) != 0) + { + SkipWhiteSpaces(ref start); + pos = start; + } + + for (; ; ) + { + while (pos < _bufferLength) + { + char c = _buffer[pos]; + + if (escaped) + { + escaped = false; + start = pos; + } + // IF current char is escape AND (escape and quote are different OR next char is a quote) + else if (c == _escape && (_escape != _quote || (pos + 1 < _bufferLength && _buffer[pos + 1] == _quote) || (pos + 1 == _bufferLength && _reader.Peek() == _quote))) + { + if (!discardValue) + value += new string(_buffer, start, pos - start); + + escaped = true; + } + else if (c == _quote) + { + quoted = false; + break; + } + + pos++; + } + + if (!quoted) + break; + else + { + if (!discardValue && !escaped) + value += new string(_buffer, start, pos - start); + + start = 0; + pos = 0; + _nextFieldStart = 0; + + if (!ReadBuffer()) + { + HandleParseError(new MalformedCsvException(GetCurrentRawData(), _nextFieldStart, Math.Max(0, _currentRecordIndex), index), ref _nextFieldStart); + return null; + } + } + } + + if (!_eof) + { + // Append remaining parsed buffer content + if (!discardValue && pos > start) + value += new string(_buffer, start, pos - start); + + if (!discardValue && value != null && (_trimmingOptions & ValueTrimmingOptions.QuotedOnly) != 0) + { + int newLength = value.Length; + while (newLength > 0 && IsWhiteSpace(value[newLength - 1])) + newLength--; + + if (newLength < value.Length) + value = value.Substring(0, newLength); + } + + // Skip quote + _nextFieldStart = pos + 1; + + // Skip whitespaces between the quote and the delimiter/eol + SkipWhiteSpaces(ref _nextFieldStart); + + // Skip delimiter + bool delimiterSkipped; + if (_nextFieldStart < _bufferLength && _buffer[_nextFieldStart] == _delimiter) + { + _nextFieldStart++; + delimiterSkipped = true; + } + else + { + delimiterSkipped = false; + } + + // Skip new line delimiter if initializing or last field + // (if the next field is missing, it will be caught when parsed) + if (!_eof && !delimiterSkipped && (initializing || index == _fieldCount - 1)) + _eol = ParseNewLine(ref _nextFieldStart); + + // If no delimiter is present after the quoted field and it is not the last field, then it is a parsing error + if (!delimiterSkipped && !_eof && !(_eol || IsNewLine(_nextFieldStart))) + HandleParseError(new MalformedCsvException(GetCurrentRawData(), _nextFieldStart, Math.Max(0, _currentRecordIndex), index), ref _nextFieldStart); + } + + if (!discardValue) + { + if (value == null) + value = string.Empty; + + _fields[index] = value; + } + } + } + + _nextFieldIndex = Math.Max(index + 1, _nextFieldIndex); + + if (index == field) + { + // If initializing, return null to signify the last field has been reached + 
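A data-level sketch of the quoted-field handling above, assuming the common configuration where Quote and Escape are both the double-quote character.

    // Input line (delimiter ';'):   1;"Dijk; ""west""";done
    // Parsed fields:                1   |   Dijk; "west"   |   done
    //
    // Inside a quoted field the delimiter is literal text, and an escaped quote
    // ("" here, or Escape followed by Quote when the two characters differ)
    // becomes a single quote in the value.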
+ if (initializing) + { + if (_eol || _eof) + return null; + else + return string.IsNullOrEmpty(value) ? string.Empty : value; + } + else + return value; + } + + index++; + } + + // Getting here is bad ... + HandleParseError(new MalformedCsvException(GetCurrentRawData(), _nextFieldStart, Math.Max(0, _currentRecordIndex), index), ref _nextFieldStart); + return null; + } + + #endregion + + #region ReadNextRecord + + /// + /// Reads the next record. + /// + /// if a record has been successfully reads; otherwise, . + /// + /// The instance has been disposed of. + /// + public bool ReadNextRecord() + { + return ReadNextRecord(false, false); + } + + /// + /// Reads the next record. + /// + /// + /// Indicates if the reader will proceed to the next record after having read headers. + /// if it stops after having read headers; otherwise, . + /// + /// + /// Indicates if the reader will skip directly to the next line without parsing the current one. + /// To be used when an error occurs. + /// + /// if a record has been successfully reads; otherwise, . + /// + /// The instance has been disposed of. + /// + protected virtual bool ReadNextRecord(bool onlyReadHeaders, bool skipToNextLine) + { + if (_eof) + { + if (_firstRecordInCache) + { + _firstRecordInCache = false; + _currentRecordIndex++; + + return true; + } + else + return false; + } + + CheckDisposed(); + + if (!_initialized) + { + _buffer = new char[_bufferSize]; + + // will be replaced if and when headers are read + _fieldHeaders = new string[0]; + + if (!ReadBuffer()) + return false; + + if (!SkipEmptyAndCommentedLines(ref _nextFieldStart)) + return false; + + // Keep growing _fields array until the last field has been found + // and then resize it to its final correct size + + _fieldCount = 0; + _fields = new string[16]; + + while (ReadField(_fieldCount, true, false) != null) + { + if (_parseErrorFlag) + { + _fieldCount = 0; + Array.Clear(_fields, 0, _fields.Length); + _parseErrorFlag = false; + _nextFieldIndex = 0; + } + else + { + _fieldCount++; + + if (_fieldCount == _fields.Length) + Array.Resize(ref _fields, (_fieldCount + 1) * 2); + } + } + + // _fieldCount contains the last field index, but it must contains the field count, + // so increment by 1 + _fieldCount++; + + if (_fields.Length != _fieldCount) + Array.Resize(ref _fields, _fieldCount); + + _initialized = true; + + // If headers are present, call ReadNextRecord again + if (_hasHeaders) + { + // Don't count first record as it was the headers + _currentRecordIndex = -1; + + _firstRecordInCache = false; + + _fieldHeaders = new string[_fieldCount]; + _fieldHeaderIndexes = new Dictionary(_fieldCount, _fieldHeaderComparer); + + for (int i = 0; i < _fields.Length; i++) + { + string headerName = _fields[i]; + if (string.IsNullOrEmpty(headerName) || headerName.Trim().Length == 0) + headerName = this.DefaultHeaderName + i.ToString(); + + _fieldHeaders[i] = headerName; + _fieldHeaderIndexes.Add(headerName, i); + } + + // Proceed to first record + if (!onlyReadHeaders) + { + // Calling again ReadNextRecord() seems to be simpler, + // but in fact would probably cause many subtle bugs because a derived class does not expect a recursive behavior + // so simply do what is needed here and no more. 
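A sketch of querying the header information built by the initialization path above; the header name is hypothetical.

    string[] headers = csv.GetFieldHeaders();          // defensive copy of the header row
    int segmentIndex = csv.GetFieldIndex("SEGMENTID"); // -1 when the header is not present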
+ + if (!SkipEmptyAndCommentedLines(ref _nextFieldStart)) + return false; + + Array.Clear(_fields, 0, _fields.Length); + _nextFieldIndex = 0; + _eol = false; + + _currentRecordIndex++; + return true; + } + } + else + { + if (onlyReadHeaders) + { + _firstRecordInCache = true; + _currentRecordIndex = -1; + } + else + { + _firstRecordInCache = false; + _currentRecordIndex = 0; + } + } + } + else + { + if (skipToNextLine) + SkipToNextLine(ref _nextFieldStart); + else if (_currentRecordIndex > -1 && !_missingFieldFlag) + { + // If not already at end of record, move there + if (!_eol && !_eof) + { + if (!_supportsMultiline) + SkipToNextLine(ref _nextFieldStart); + else + { + // a dirty trick to handle the case where extra fields are present + while (ReadField(_nextFieldIndex, true, true) != null) + { + } + } + } + } + + if (!_firstRecordInCache && !SkipEmptyAndCommentedLines(ref _nextFieldStart)) + return false; + + if (_hasHeaders || !_firstRecordInCache) + _eol = false; + + // Check to see if the first record is in cache. + // This can happen when initializing a reader with no headers + // because one record must be read to get the field count automatically + if (_firstRecordInCache) + _firstRecordInCache = false; + else + { + Array.Clear(_fields, 0, _fields.Length); + _nextFieldIndex = 0; + } + + _missingFieldFlag = false; + _parseErrorFlag = false; + _currentRecordIndex++; + } + + return true; + } + + #endregion + + #region SkipEmptyAndCommentedLines + + /// + /// Skips empty and commented lines. + /// If the end of the buffer is reached, its content be discarded and filled again from the reader. + /// + /// + /// The position in the buffer where to start parsing. + /// Will contains the resulting position after the operation. + /// + /// if the end of the reader has not been reached; otherwise, . + /// + /// The instance has been disposed of. + /// + private bool SkipEmptyAndCommentedLines(ref int pos) + { + if (pos < _bufferLength) + DoSkipEmptyAndCommentedLines(ref pos); + + while (pos >= _bufferLength && !_eof) + { + if (ReadBuffer()) + { + pos = 0; + DoSkipEmptyAndCommentedLines(ref pos); + } + else + return false; + } + + return !_eof; + } + + /// + /// Worker method. + /// Skips empty and commented lines. + /// + /// + /// The position in the buffer where to start parsing. + /// Will contains the resulting position after the operation. + /// + /// + /// The instance has been disposed of. + /// + private void DoSkipEmptyAndCommentedLines(ref int pos) + { + while (pos < _bufferLength) + { + if (_buffer[pos] == _comment) + { + pos++; + SkipToNextLine(ref pos); + } + else if (_skipEmptyLines && ParseNewLine(ref pos)) + continue; + else + break; + } + } + + #endregion + + #region SkipWhiteSpaces + + /// + /// Skips whitespace characters. + /// + /// The starting position of the parsing. Will contain the resulting end position. + /// if the end of the reader has not been reached; otherwise, . + /// + /// The instance has been disposed of. + /// + private bool SkipWhiteSpaces(ref int pos) + { + for (; ; ) + { + while (pos < _bufferLength && IsWhiteSpace(_buffer[pos])) + pos++; + + if (pos < _bufferLength) + break; + else + { + pos = 0; + + if (!ReadBuffer()) + return false; + } + } + + return true; + } + + #endregion + + #region SkipToNextLine + + /// + /// Skips ahead to the next NewLine character. + /// If the end of the buffer is reached, its content be discarded and filled again from the reader. + /// + /// + /// The position in the buffer where to start parsing. 
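A sketch of the line-skipping behaviour implemented above, assuming '#' is the configured comment character (the field _comment is used here, but its default value is not visible in this hunk).

    // Input:
    //   # comment line
    //   (blank line)
    //   LOCATIONID;SEGMENTID
    //   L1;S1
    //
    // With SkipEmptyLines = true, the comment line and the blank line are consumed by
    // SkipEmptyAndCommentedLines, the third line is taken as the header row and
    // "L1;S1" becomes the first record.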
+ /// Will contains the resulting position after the operation. + /// + /// if the end of the reader has not been reached; otherwise, . + /// + /// The instance has been disposed of. + /// + private bool SkipToNextLine(ref int pos) + { + // ((pos = 0) == 0) is a little trick to reset position inline + while ((pos < _bufferLength || (ReadBuffer() && ((pos = 0) == 0))) && !ParseNewLine(ref pos)) + pos++; + + return !_eof; + } + + #endregion + + #region HandleParseError + + /// + /// Handles a parsing error. + /// + /// The parsing error that occured. + /// The current position in the buffer. + /// + /// is . + /// + private void HandleParseError(MalformedCsvException error, ref int pos) + { + if (error == null) + throw new ArgumentNullException("error"); + + _parseErrorFlag = true; + + switch (_defaultParseErrorAction) + { + case ParseErrorAction.ThrowException: + throw error; + + case ParseErrorAction.RaiseEvent: + ParseErrorEventArgs e = new ParseErrorEventArgs(error, ParseErrorAction.ThrowException); + OnParseError(e); + + switch (e.Action) + { + case ParseErrorAction.ThrowException: + throw e.Error; + + case ParseErrorAction.RaiseEvent: + throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, ExceptionMessage.ParseErrorActionInvalidInsideParseErrorEvent, e.Action), e.Error); + + case ParseErrorAction.AdvanceToNextLine: + // already at EOL when fields are missing, so don't skip to next line in that case + if (!_missingFieldFlag && pos >= 0) + SkipToNextLine(ref pos); + break; + + default: + throw new NotSupportedException(string.Format(CultureInfo.InvariantCulture, ExceptionMessage.ParseErrorActionNotSupported, e.Action), e.Error); + } + break; + + case ParseErrorAction.AdvanceToNextLine: + // already at EOL when fields are missing, so don't skip to next line in that case + if (!_missingFieldFlag && pos >= 0) + SkipToNextLine(ref pos); + break; + + default: + throw new NotSupportedException(string.Format(CultureInfo.InvariantCulture, ExceptionMessage.ParseErrorActionNotSupported, _defaultParseErrorAction), error); + } + } + + #endregion + + #region HandleMissingField + + /// + /// Handles a missing field error. + /// + /// The partially parsed value, if available. + /// The missing field index. + /// The current position in the raw data. + /// + /// The resulting value according to . + /// If the action is set to , + /// then the parse error will be handled according to . 
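A sketch of hooking the RaiseEvent branch above; the ParseError event itself is not shown in this hunk and is assumed to be the event raised by OnParseError, with ParseErrorEventArgs as its argument type.

    csv.DefaultParseErrorAction = ParseErrorAction.RaiseEvent;
    csv.ParseError += delegate(object sender, ParseErrorEventArgs e)
    {
        // Drop the malformed line and continue; answering RaiseEvent again
        // would hit the InvalidOperationException branch above.
        e.Action = ParseErrorAction.AdvanceToNextLine;
    };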
+ /// + private string HandleMissingField(string value, int fieldIndex, ref int currentPosition) + { + if (fieldIndex < 0 || fieldIndex >= _fieldCount) + throw new ArgumentOutOfRangeException("fieldIndex", fieldIndex, string.Format(CultureInfo.InvariantCulture, ExceptionMessage.FieldIndexOutOfRange, fieldIndex)); + + _missingFieldFlag = true; + + for (int i = fieldIndex + 1; i < _fieldCount; i++) + _fields[i] = null; + + if (value != null) + return value; + else + { + switch (_missingFieldAction) + { + case MissingFieldAction.ParseError: + HandleParseError(new MissingFieldCsvException(GetCurrentRawData(), currentPosition, Math.Max(0, _currentRecordIndex), fieldIndex), ref currentPosition); + return value; + + case MissingFieldAction.ReplaceByEmpty: + return string.Empty; + + case MissingFieldAction.ReplaceByNull: + return null; + + default: + throw new NotSupportedException(string.Format(CultureInfo.InvariantCulture, ExceptionMessage.MissingFieldActionNotSupported, _missingFieldAction)); + } + } + } + + #endregion + + #endregion + + #region IDataReader support methods + + /// + /// Validates the state of the data reader. + /// + /// The validations to accomplish. + /// + /// No current record. + /// + /// + /// This operation is invalid when the reader is closed. + /// + private void ValidateDataReader(DataReaderValidations validations) + { + if ((validations & DataReaderValidations.IsInitialized) != 0 && !_initialized) + throw new InvalidOperationException(ExceptionMessage.NoCurrentRecord); + + if ((validations & DataReaderValidations.IsNotClosed) != 0 && _isDisposed) + throw new InvalidOperationException(ExceptionMessage.ReaderClosed); + } + + /// + /// Copy the value of the specified field to an array. + /// + /// The index of the field. + /// The offset in the field value. + /// The destination array where the field value will be copied. + /// The destination array offset. + /// The number of characters to copy from the field value. + /// + private long CopyFieldToArray(int field, long fieldOffset, Array destinationArray, int destinationOffset, int length) + { + EnsureInitialize(); + + if (field < 0 || field >= _fieldCount) + throw new ArgumentOutOfRangeException("field", field, string.Format(CultureInfo.InvariantCulture, ExceptionMessage.FieldIndexOutOfRange, field)); + + if (fieldOffset < 0 || fieldOffset >= int.MaxValue) + throw new ArgumentOutOfRangeException("fieldOffset"); + + // Array.Copy(...) will do the remaining argument checks + + if (length == 0) + return 0; + + string value = this[field]; + + if (value == null) + value = string.Empty; + + Debug.Assert(fieldOffset < int.MaxValue); + + Debug.Assert(destinationArray.GetType() == typeof(char[]) || destinationArray.GetType() == typeof(byte[])); + + if (destinationArray.GetType() == typeof(char[])) + Array.Copy(value.ToCharArray((int)fieldOffset, length), 0, destinationArray, destinationOffset, length); + else + { + char[] chars = value.ToCharArray((int)fieldOffset, length); + byte[] source = new byte[chars.Length]; ; + + for (int i = 0; i < chars.Length; i++) + source[i] = Convert.ToByte(chars[i]); + + Array.Copy(source, 0, destinationArray, destinationOffset, length); + } + + return length; + } + + #endregion + + #region IDataReader Members + + int IDataReader.RecordsAffected + { + get + { + // For SELECT statements, -1 must be returned. 
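A sketch of the three MissingFieldAction outcomes handled above, for a record that is one field short of the header row.

    // Header: A;B;C      Record: 1;2
    //
    //   MissingFieldAction.ReplaceByEmpty  ->  csv[2] == ""      (MissingFieldFlag is set)
    //   MissingFieldAction.ReplaceByNull   ->  csv[2] == null
    //   MissingFieldAction.ParseError      ->  routed through HandleParseError and
    //                                          therefore DefaultParseErrorAction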
+ return -1; + } + } + + bool IDataReader.IsClosed + { + get + { + return _eof; + } + } + + bool IDataReader.NextResult() + { + ValidateDataReader(DataReaderValidations.IsNotClosed); + + return false; + } + + void IDataReader.Close() + { + Dispose(); + } + + bool IDataReader.Read() + { + ValidateDataReader(DataReaderValidations.IsNotClosed); + + return ReadNextRecord(); + } + + int IDataReader.Depth + { + get + { + ValidateDataReader(DataReaderValidations.IsNotClosed); + + return 0; + } + } + + DataTable IDataReader.GetSchemaTable() + { + EnsureInitialize(); + ValidateDataReader(DataReaderValidations.IsNotClosed); + + DataTable schema = new DataTable("SchemaTable"); + schema.Locale = CultureInfo.InvariantCulture; + schema.MinimumCapacity = _fieldCount; + + schema.Columns.Add(SchemaTableColumn.AllowDBNull, typeof(bool)).ReadOnly = true; + schema.Columns.Add(SchemaTableColumn.BaseColumnName, typeof(string)).ReadOnly = true; + schema.Columns.Add(SchemaTableColumn.BaseSchemaName, typeof(string)).ReadOnly = true; + schema.Columns.Add(SchemaTableColumn.BaseTableName, typeof(string)).ReadOnly = true; + schema.Columns.Add(SchemaTableColumn.ColumnName, typeof(string)).ReadOnly = true; + schema.Columns.Add(SchemaTableColumn.ColumnOrdinal, typeof(int)).ReadOnly = true; + schema.Columns.Add(SchemaTableColumn.ColumnSize, typeof(int)).ReadOnly = true; + schema.Columns.Add(SchemaTableColumn.DataType, typeof(object)).ReadOnly = true; + schema.Columns.Add(SchemaTableColumn.IsAliased, typeof(bool)).ReadOnly = true; + schema.Columns.Add(SchemaTableColumn.IsExpression, typeof(bool)).ReadOnly = true; + schema.Columns.Add(SchemaTableColumn.IsKey, typeof(bool)).ReadOnly = true; + schema.Columns.Add(SchemaTableColumn.IsLong, typeof(bool)).ReadOnly = true; + schema.Columns.Add(SchemaTableColumn.IsUnique, typeof(bool)).ReadOnly = true; + schema.Columns.Add(SchemaTableColumn.NumericPrecision, typeof(short)).ReadOnly = true; + schema.Columns.Add(SchemaTableColumn.NumericScale, typeof(short)).ReadOnly = true; + schema.Columns.Add(SchemaTableColumn.ProviderType, typeof(int)).ReadOnly = true; + + schema.Columns.Add(SchemaTableOptionalColumn.BaseCatalogName, typeof(string)).ReadOnly = true; + schema.Columns.Add(SchemaTableOptionalColumn.BaseServerName, typeof(string)).ReadOnly = true; + schema.Columns.Add(SchemaTableOptionalColumn.IsAutoIncrement, typeof(bool)).ReadOnly = true; + schema.Columns.Add(SchemaTableOptionalColumn.IsHidden, typeof(bool)).ReadOnly = true; + schema.Columns.Add(SchemaTableOptionalColumn.IsReadOnly, typeof(bool)).ReadOnly = true; + schema.Columns.Add(SchemaTableOptionalColumn.IsRowVersion, typeof(bool)).ReadOnly = true; + + string[] columnNames; + + if (_hasHeaders) + columnNames = _fieldHeaders; + else + { + columnNames = new string[_fieldCount]; + + for (int i = 0; i < _fieldCount; i++) + columnNames[i] = "Column" + i.ToString(CultureInfo.InvariantCulture); + } + + // null marks columns that will change for each row + object[] schemaRow = new object[] { + true, // 00- AllowDBNull + null, // 01- BaseColumnName + string.Empty, // 02- BaseSchemaName + string.Empty, // 03- BaseTableName + null, // 04- ColumnName + null, // 05- ColumnOrdinal + int.MaxValue, // 06- ColumnSize + typeof(string), // 07- DataType + false, // 08- IsAliased + false, // 09- IsExpression + false, // 10- IsKey + false, // 11- IsLong + false, // 12- IsUnique + DBNull.Value, // 13- NumericPrecision + DBNull.Value, // 14- NumericScale + (int) DbType.String, // 15- ProviderType + + string.Empty, // 16- BaseCatalogName + 
string.Empty, // 17- BaseServerName + false, // 18- IsAutoIncrement + false, // 19- IsHidden + true, // 20- IsReadOnly + false // 21- IsRowVersion + }; + + for (int i = 0; i < columnNames.Length; i++) + { + schemaRow[1] = columnNames[i]; // Base column name + schemaRow[4] = columnNames[i]; // Column name + schemaRow[5] = i; // Column ordinal + + schema.Rows.Add(schemaRow); + } + + return schema; + } + + #endregion + + #region IDataRecord Members + + int IDataRecord.GetInt32(int i) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + + string value = this[i]; + + return Int32.Parse(value == null ? string.Empty : value, CultureInfo.CurrentCulture); + } + + object IDataRecord.this[string name] + { + get + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + return this[name]; + } + } + + object IDataRecord.this[int i] + { + get + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + return this[i]; + } + } + + object IDataRecord.GetValue(int i) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + + if (((IDataRecord)this).IsDBNull(i)) + return DBNull.Value; + else + return this[i]; + } + + bool IDataRecord.IsDBNull(int i) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + return (string.IsNullOrEmpty(this[i])); + } + + long IDataRecord.GetBytes(int i, long fieldOffset, byte[] buffer, int bufferoffset, int length) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + + return CopyFieldToArray(i, fieldOffset, buffer, bufferoffset, length); + } + + byte IDataRecord.GetByte(int i) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + return Byte.Parse(this[i], CultureInfo.CurrentCulture); + } + + Type IDataRecord.GetFieldType(int i) + { + EnsureInitialize(); + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + + if (i < 0 || i >= _fieldCount) + throw new ArgumentOutOfRangeException("i", i, string.Format(CultureInfo.InvariantCulture, ExceptionMessage.FieldIndexOutOfRange, i)); + + return typeof(string); + } + + decimal IDataRecord.GetDecimal(int i) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + return Decimal.Parse(this[i], CultureInfo.CurrentCulture); + } + + int IDataRecord.GetValues(object[] values) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + + IDataRecord record = (IDataRecord)this; + + for (int i = 0; i < _fieldCount; i++) + values[i] = record.GetValue(i); + + return _fieldCount; + } + + string IDataRecord.GetName(int i) + { + EnsureInitialize(); + ValidateDataReader(DataReaderValidations.IsNotClosed); + + if (i < 0 || i >= _fieldCount) + throw new ArgumentOutOfRangeException("i", i, string.Format(CultureInfo.InvariantCulture, ExceptionMessage.FieldIndexOutOfRange, i)); + + if (_hasHeaders) + return _fieldHeaders[i]; + else + return "Column" + i.ToString(CultureInfo.InvariantCulture); + } + + long IDataRecord.GetInt64(int i) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + return Int64.Parse(this[i], CultureInfo.CurrentCulture); + } + + double IDataRecord.GetDouble(int i) + { + ValidateDataReader(DataReaderValidations.IsInitialized | 
DataReaderValidations.IsNotClosed); + return Double.Parse(this[i], CultureInfo.CurrentCulture); + } + + bool IDataRecord.GetBoolean(int i) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + + string value = this[i]; + + int result; + + if (Int32.TryParse(value, out result)) + return (result != 0); + else + return Boolean.Parse(value); + } + + Guid IDataRecord.GetGuid(int i) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + return new Guid(this[i]); + } + + DateTime IDataRecord.GetDateTime(int i) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + return DateTime.Parse(this[i], CultureInfo.CurrentCulture); + } + + int IDataRecord.GetOrdinal(string name) + { + EnsureInitialize(); + ValidateDataReader(DataReaderValidations.IsNotClosed); + + int index; + + if (!_fieldHeaderIndexes.TryGetValue(name, out index)) + throw new ArgumentException(string.Format(CultureInfo.InvariantCulture, ExceptionMessage.FieldHeaderNotFound, name), "name"); + + return index; + } + + string IDataRecord.GetDataTypeName(int i) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + return typeof(string).FullName; + } + + float IDataRecord.GetFloat(int i) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + return Single.Parse(this[i], CultureInfo.CurrentCulture); + } + + IDataReader IDataRecord.GetData(int i) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + + if (i == 0) + return this; + else + return null; + } + + long IDataRecord.GetChars(int i, long fieldoffset, char[] buffer, int bufferoffset, int length) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + + return CopyFieldToArray(i, fieldoffset, buffer, bufferoffset, length); + } + + string IDataRecord.GetString(int i) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + return this[i]; + } + + char IDataRecord.GetChar(int i) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + return Char.Parse(this[i]); + } + + short IDataRecord.GetInt16(int i) + { + ValidateDataReader(DataReaderValidations.IsInitialized | DataReaderValidations.IsNotClosed); + return Int16.Parse(this[i], CultureInfo.CurrentCulture); + } + + #endregion + + #region IEnumerable Members + + /// + /// Returns an that can iterate through CSV records. + /// + /// An that can iterate through CSV records. + /// + /// The instance has been disposed of. + /// + public CsvReader.RecordEnumerator GetEnumerator() + { + return new CsvReader.RecordEnumerator(this); + } + + /// + /// Returns an that can iterate through CSV records. + /// + /// An that can iterate through CSV records. + /// + /// The instance has been disposed of. + /// + IEnumerator IEnumerable.GetEnumerator() + { + return this.GetEnumerator(); + } + + #endregion + + #region IEnumerable Members + + /// + /// Returns an that can iterate through CSV records. + /// + /// An that can iterate through CSV records. + /// + /// The instance has been disposed of. + /// + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + #endregion + + #region IDisposable members + +#if DEBUG + /// + /// Contains the stack when the object was allocated. 
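Because the class implements IDataReader through the explicit members above, it can be handed to standard ADO.NET plumbing; a sketch with a hypothetical file name.

    DataTable table = new DataTable();
    using (CsvReader csv = new CsvReader(new StreamReader("locations.csv"), true, ';'))
    {
        table.Load(csv);   // DataTable.Load(IDataReader) infers the columns from the reader's schema
    }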
+ /// + private System.Diagnostics.StackTrace _allocStack; +#endif + + /// + /// Contains the disposed status flag. + /// + private bool _isDisposed = false; + + /// + /// Contains the locking object for multi-threading purpose. + /// + private readonly object _lock = new object(); + + /// + /// Occurs when the instance is disposed of. + /// + public event EventHandler Disposed; + + /// + /// Gets a value indicating whether the instance has been disposed of. + /// + /// + /// if the instance has been disposed of; otherwise, . + /// + [System.ComponentModel.Browsable(false)] + public bool IsDisposed + { + get { return _isDisposed; } + } + + /// + /// Raises the event. + /// + /// A that contains the event data. + protected virtual void OnDisposed(EventArgs e) + { + EventHandler handler = Disposed; + + if (handler != null) + handler(this, e); + } + + /// + /// Checks if the instance has been disposed of, and if it has, throws an ; otherwise, does nothing. + /// + /// + /// The instance has been disposed of. + /// + /// + /// Derived classes should call this method at the start of all methods and properties that should not be accessed after a call to . + /// + protected void CheckDisposed() + { + if (_isDisposed) + throw new ObjectDisposedException(this.GetType().FullName); + } + + /// + /// Releases all resources used by the instance. + /// + /// + /// Calls with the disposing parameter set to to free unmanaged and managed resources. + /// + public void Dispose() + { + if (!_isDisposed) + { + Dispose(true); + GC.SuppressFinalize(this); + } + } + + /// + /// Releases the unmanaged resources used by this instance and optionally releases the managed resources. + /// + /// + /// to release both managed and unmanaged resources; to release only unmanaged resources. + /// + protected virtual void Dispose(bool disposing) + { + // Refer to http://www.bluebytesoftware.com/blog/PermaLink,guid,88e62cdf-5919-4ac7-bc33-20c06ae539ae.aspx + // Refer to http://www.gotdotnet.com/team/libraries/whitepapers/resourcemanagement/resourcemanagement.aspx + + // No exception should ever be thrown except in critical scenarios. + // Unhandled exceptions during finalization will tear down the process. + if (!_isDisposed) + { + try + { + // Dispose-time code should call Dispose() on all owned objects that implement the IDisposable interface. + // "owned" means objects whose lifetime is solely controlled by the container. + // In cases where ownership is not as straightforward, techniques such as HandleCollector can be used. + // Large managed object fields should be nulled out. + + // Dispose-time code should also set references of all owned objects to null, after disposing them. This will allow the referenced objects to be garbage collected even if not all references to the "parent" are released. It may be a significant memory consumption win if the referenced objects are large, such as big arrays, collections, etc. + if (disposing) + { + // Acquire a lock on the object while disposing. + + if (_reader != null) + { + lock (_lock) + { + if (_reader != null) + { + _reader.Dispose(); + + _reader = null; + _buffer = null; + _eof = true; + } + } + } + } + } + finally + { + // Ensure that the flag is set + _isDisposed = true; + + // Catch any issues about firing an event on an already disposed object. + try + { + OnDisposed(EventArgs.Empty); + } + catch { } + } + } + } + + /// + /// Releases unmanaged resources and performs other cleanup operations before the instance is reclaimed by garbage collection. 
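A sketch of observing disposal through the members above.

    csv.Disposed += delegate { Console.WriteLine("reader disposed"); };
    csv.Dispose();                   // disposes the wrapped TextReader under _lock
    bool disposed = csv.IsDisposed;  // true; members guarded by CheckDisposed() now throw ObjectDisposedException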
+ /// + ~CsvReader() + { +#if DEBUG + Debug.WriteLine("FinalizableObject was not disposed" + _allocStack.ToString()); +#endif + + Dispose(false); + } + + #endregion + } +} \ No newline at end of file Index: DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvImporterLocations.cs =================================================================== diff -u -r3102 -r3110 --- DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvImporterLocations.cs (.../CsvImporterLocations.cs) (revision 3102) +++ DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvImporterLocations.cs (.../CsvImporterLocations.cs) (revision 3110) @@ -18,17 +18,19 @@ // All names, logos, and references to "Deltares" are registered trademarks of // Stichting Deltares and remain full property of Stichting Deltares at all times. // All rights reserved. + using System; using System.Collections.Generic; using System.IO; using System.Linq; -using System.Text; using System.Threading; -using System.Threading.Tasks; -using Microsoft.SqlServer.Server; +using LumenWorks.Framework.IO.Csv; namespace Deltares.LayerOnSlopeTool.Data.CsvImporters { + /// + /// Holds the importer for the locations from csv files + /// public class CsvImporterLocations { private List locationRecords = new List(); @@ -70,23 +72,50 @@ /// /// The x soil geometry2 d origin. /// - public double? XSoilGeometry2DOrigin { get; set; } + public double? XSoilGeometry2DOrigin { get; set; } // Only optional item /// /// Gets or sets the dike embankment material. This is to be used as filling material for surface between the original 2D sti profile and the surface line. /// /// /// The dike embankment material. /// - public string DikeEmbankmentMaterial { get; set; } - - public string LayerMaterial { get; set; } - - public double LayerThickness { get; set; } - + public string DikeEmbankmentMaterial { get; set; } // Now required, not optional + /// + /// Gets or sets the name of the soil geometry. + /// + /// + /// The name of the soil geometry. + /// + public string SoilGeometryName { get; set; } //NEW + + /// + /// Gets or sets the layer material. + /// + /// + /// The layer material. + /// + public string LayerMaterial { get; set; } //NEW + + /// + /// Gets or sets the layer thickness. + /// + /// + /// The layer thickness. + /// + public double LayerThickness { get; set; } //NEW } + /// + /// Initializes a new instance of the class, filling the records from the csv. + /// + /// Name of the file. + /// + /// De filenaam voor de locations csv is leeg. 
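A sketch of constructing the importer documented above; the path is hypothetical, and the ArgumentException messages are the Dutch ones quoted in the code.

    CsvImporterLocations importer = new CsvImporterLocations(@"D:\dam\locations.csv");
    // Throws ArgumentException when the file name is empty
    // ("De filenaam voor de locations csv is leeg.") or when the file does not exist
    // ("De locations csv file met naam {0} bestaat niet.").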
+ /// or + /// or + /// public CsvImporterLocations(string fileName) { errorMessages.Clear(); @@ -97,448 +126,95 @@ if (!File.Exists(fileName)) { - throw new ArgumentException(string.Format("De loacations csv file met naam {0} bestaat niet.", fileName)); + throw new ArgumentException(string.Format("De locations csv file met naam {0} bestaat niet.", fileName)); } var oldcur = Thread.CurrentThread.CurrentCulture; try { - // Thread.CurrentThread.CurrentCulture = CsvReaderUtilities.DetermineCultureForFile(fileName); - // using (var csv = new CsvReader(new StreamReader(fileName), true, ';')) - // { - // string[] headers = CsvImporterHelper.GetFieldHeaders(this, csv); - // if (headers.Count() < 3) - // { - // var csvHeaderError = LocalizationManager.GetTranslatedText(this.GetType(), "csvHeaderError"); - // throw new CsvImporterSegmentsException(String.Format("{0} : {1}", fileName, csvHeaderError)); - // } - // - // int colIndexLocationId = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.LocationColumnName); - // int colIndexSurfacelineId = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.SurfaceLineColumnName); - // int colIndexSegmentId = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.SegmentColumnName); - // int colIndexDikeRingId = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.DikeRingColumnName); - // int colIndexGeoX = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.XValueColumnName); - // int colIndexGeoY = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.YValueColumnName); - // int colIndexXSoilgeometry2DOrigin = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.XSoilGeometry2DOriginColumnName); - // int colIndexPolderlevel = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.PolderLevelColumnName); - // int colIndexHeadPl2 = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.HeadPl2ColumnName); - // int colIndexHeadPl3 = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.HeadPl3ColumnName); - // int colIndexHeadPl4 = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.HeadPl4ColumnName); - // int colIndexOphoogmateriaaldijk = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.DikeEmbankmentMaterialColumnName); - // int colIndexOphoogmateriaalberm = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.ShoulderEmbankmentMaterialColumnName); - // int colIndexPenetrationLength = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.PenetrationLengthColumnName); - // int colIndexTrafficLoad = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.TrafficLoadColumnName); - // int colIndexTL_DegreeOfConsolidation = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.TlDegreeOfConsolidationColumnName); - // int colIndexMinimalCircleDepth = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.MinimalCircleDepthColumnName); - // int colIndexDempingsfactorPl3 = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.DampingFactorPl3ColumnName); - // int colIndexDempingsfactorPl4 = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.DampingFactorPl4ColumnName); - // int colIndexPLLineCreationMethod = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.PlLineCreationMethodColumnName); - // int colIndexSafetyFactorPiping = CsvReaderUtilities.GetHeaderIndexByString(headers, 
CsvColumnNames.RequiredSafetyFactorPipingColumnName); - // int colIndexSafetyFactorStabilityInnerSlope = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.RequiredSafetyFactorStabilityInnerSlopeColumnName); - // int colIndexSafetyFactorStabilityOuterSlope = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.RequiredSafetyFactorStabilityOuterSlopeColumnName); - // int colIndexUpliftCriterionPiping = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.UpliftCriterionPipingColumnName); - // int colIndexUpliftCriterionStability = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.UpliftCriterionStabilityColumnName); - // int colIndexDistanceToEntryPoint = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.DistanceToEntryPointColumnName); - // int colIndexPLLineOffsetBelowDikeTopAtRiver = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.PlLineOffsetBelowDikeTopAtRiverColumnName); - // int colIndexPLLineOffsetBelowDikeTopAtPolder = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.PlLineOffsetBelowDikeTopAtPolderColumnName); - // int colIndexPLLineOffsetBelowShoulderBaseInside = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.PlLineOffsetBelowShoulderBaseInsideColumnName); - // int colIndexPLLineOffsetBelowDikeToeAtPolder = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.PlLineOffsetBelowDikeToeAtPolderColumnName); - // int colIndexPlLineOffsetBelowDikeCrestMiddle = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.PlLineOffsetBelowDikeCrestMiddleColumnName); - // int colIndexPlLineOffsetFactorBelowShoulderCrest = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.PlLineOffsetFactorBelowShoulderCrestColumnName); - // int colIndexUsePlLineOffsetBelowDikeCrestMiddle = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.UsePlLineOffsetBelowDikeCrestMiddleColumnName); - // int colIndexUsePlLineOffsetFactorBelowShoulderCrest = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.UsePlLineOffsetFactorBelowShoulderCrestColumnName); - // int colIndexIntrusionVerticalWaterPressure = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.IntrusionVerticalWaterPressureColumnName); - // int colIndexStabilityShoulderGrowSlope = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.StabilityShoulderGrowSlopeColumnName); - // int colIndexStabilityShoulderGrowDeltaX = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.StabilityShoulderGrowDeltaXColumnName); - // int colIndexStabilitySlopeAdaptionDeltaX = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.StabilitySlopeAdaptionDeltaXColumnName); - // int colIndexSlopeDampingPiezometricHeightPolderSide = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.SlopeDampingPiezometricHeightPolderSideColumnName); - // int colIndexStabilityDesignMethod = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.StabilityDesignMethodColumnName); - // int colIndexStabilityZoneType = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.ZoneTypeColumnName); - // int colIndexForbiddenZoneFactor = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.ForbiddenZoneFactorColumnName); - // int colIndexSlopeAdaptionStartCotangent = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.SlopeAdaptionStartCotangentColumnName); - // int colIndexSlopeAdaptionEndCotangent = 
CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.SlopeAdaptionEndCotangentColumnName); - // int colIndexSlopeAdaptionStepCotangent = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.SlopeAdaptionStepCotangentColumnName); - // int colIndexNewDikeTopWidth = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.NewDikeTopWidthColumnName); - // int colIndexUseNewDikeTopWidth = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.UseNewDikeTopWidthColumnName); - // int colIndexNewDikeSlopeInside = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.NewDikeSlopeInsideColumnName); - // int colIndexUseNewDikeSlopeInside = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.UseNewDikeSlopeInsideColumnName); - // int colIndexNewDikeSlopeOutside = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.NewDikeSlopeOutsideColumnName); - // int colIndexUseNewDikeSlopeOutside = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.UseNewDikeSlopeOutsideColumnName); - // int colIndexNewShoulderTopSlope = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.NewShoulderTopSlopeColumnName); - // int colIndexUseNewShoulderTopSlope = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.UseNewShoulderTopSlopeColumnName); - // int colIndexNewShoulderBaseSlope = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.NewShoulderBaseSlopeColumnName); - // int colIndexUseNewShoulderBaseSlope = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.UseNewShoulderBaseSlopeColumnName); - // int colIndexNewMaxHeightShoulderAsFraction = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.NewMaxHeightShoulderAsFractionColumnName); - // int colIndexUseNewMaxHeightShoulderAsFraction = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.UseNewMaxHeightShoulderAsFractionColumnName); - // int colIndexNewMinDistanceDikeToeStartDitch = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.NewMinDistanceDikeToeStartDitchColumnName); - // int colIndexUseNewMinDistanceDikeToeStartDitch = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.UseNewMinDistanceDikeToeStartDitchColumnName); - // int colIndexUseNewDitchDefinition = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.UseNewDitchDefinitionColumnName); - // int colIndexNewWidthDitchBottom = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.NewWidthDitchBottomColumnName); - // int colIndexNewSlopeAngleDitch = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.NewSlopeAngleDitchColumnName); - // int colIndexNewDepthDitch = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.NewDepthDitchColumnName); - // int colIndexDikeTableHeight = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.DikeTableHeightColumnName); - // int colIndexRiverLevel = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.RiverLevelColumnName); - // int colIndexRiverLevelLow = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.RiverLevelLowColumnName); - // - // var index = 1; - // while (csv.ReadNextRecord()) - // { - // var locationRecord = new LocationRecord(); - // var colIndex = -1; // Keep track of column for error message - // try - // { - // locationRecord.LocationRecordId = index++; - // - // /* - // * Required columns - // */ - // locationRecord.LocationId = csv[colIndexLocationId]; - // 
locationRecord.SurfaceLineId = csv[colIndexSurfacelineId]; - // locationRecord.SegmentId = csv[colIndexSegmentId]; - // - // /* - // * Optional columns - // */ - // if (colIndexDikeRingId > -1) - // { - // colIndex = colIndexDikeRingId; - // locationRecord.DikeRingId = csv[colIndexDikeRingId]; - // } - // if (colIndexGeoX > -1) - // { - // colIndex = colIndexGeoX; - // locationRecord.GeoX = Convert.ToDouble(csv[colIndexGeoX]); - // } - // if (colIndexGeoY > -1) - // { - // colIndex = colIndexGeoY; - // locationRecord.GeoY = Convert.ToDouble(csv[colIndexGeoY]); - // } - // if (colIndexXSoilgeometry2DOrigin > -1) - // { - // colIndex = colIndexXSoilgeometry2DOrigin; - // locationRecord.XSoilGeometry2DOrigin = Convert.ToDouble(csv[colIndexXSoilgeometry2DOrigin]); - // } - // if (colIndexPolderlevel > -1) - // { - // colIndex = colIndexPolderlevel; - // locationRecord.PolderLevel = Convert.ToDouble(csv[colIndexPolderlevel]); - // } - // if (colIndexHeadPl2 > -1) - // { - // colIndex = colIndexHeadPl2; - // locationRecord.HeadPl2 = Convert.ToDouble(csv[colIndexHeadPl2]); - // } - // if (colIndexHeadPl3 > -1) - // { - // colIndex = colIndexHeadPl3; - // locationRecord.HeadPl3 = Convert.ToDouble(csv[colIndexHeadPl3]); - // } - // if (colIndexHeadPl4 > -1) - // { - // colIndex = colIndexHeadPl4; - // locationRecord.HeadPl4 = Convert.ToDouble(csv[colIndexHeadPl4]); - // } - // if (colIndexOphoogmateriaaldijk > -1) - // { - // colIndex = colIndexOphoogmateriaaldijk; - // locationRecord.DikeEmbankmentMaterial = csv[colIndexOphoogmateriaaldijk]; - // } - // if (colIndexOphoogmateriaalberm > -1) - // { - // colIndex = colIndexOphoogmateriaalberm; - // locationRecord.ShoulderEmbankmentMaterial = csv[colIndexOphoogmateriaalberm]; - // } - // if (colIndexPenetrationLength > -1) - // { - // colIndex = colIndexPenetrationLength; - // locationRecord.PenetrationLength = Convert.ToDouble(csv[colIndexPenetrationLength]); - // } - // if (colIndexTrafficLoad > -1) - // { - // colIndex = colIndexTrafficLoad; - // locationRecord.TrafficLoad = Convert.ToDouble(csv[colIndexTrafficLoad]); - // } - // if (colIndexTL_DegreeOfConsolidation > -1) - // { - // colIndex = colIndexTL_DegreeOfConsolidation; - // locationRecord.TL_DegreeOfConsolidation = Convert.ToDouble(csv[colIndexTL_DegreeOfConsolidation]); - // } - // if (colIndexMinimalCircleDepth > -1) - // { - // colIndex = colIndexMinimalCircleDepth; - // locationRecord.MinimalCircleDepth = Convert.ToDouble(csv[colIndexMinimalCircleDepth]); - // } - // if (colIndexDempingsfactorPl3 > -1) - // { - // colIndex = colIndexDempingsfactorPl3; - // locationRecord.DampingFactorPl3 = Convert.ToDouble(csv[colIndexDempingsfactorPl3]); - // } - // if (colIndexDempingsfactorPl4 > -1) - // { - // colIndex = colIndexDempingsfactorPl4; - // locationRecord.DampingFactorPl4 = Convert.ToDouble(csv[colIndexDempingsfactorPl4]); - // } - // if (colIndexPLLineCreationMethod > -1) - // { - // colIndex = colIndexPLLineCreationMethod; - // locationRecord.PLLineCreationMethod = LocationImportHelper.ToPLLineCreationMethod(csv[colIndexPLLineCreationMethod]); - // } - // if (colIndexSafetyFactorPiping > -1) - // { - // colIndex = colIndexSafetyFactorPiping; - // locationRecord.RequiredSafetyFactorPiping = Convert.ToDouble(csv[colIndexSafetyFactorPiping]); - // } - // if (colIndexSafetyFactorStabilityInnerSlope > -1) - // { - // colIndex = colIndexSafetyFactorStabilityInnerSlope; - // locationRecord.RequiredSafetyFactorStabilityInnerSlope = 
Convert.ToDouble(csv[colIndexSafetyFactorStabilityInnerSlope]); - // } - // if (colIndexSafetyFactorStabilityOuterSlope > -1) - // { - // colIndex = colIndexSafetyFactorStabilityOuterSlope; - // locationRecord.RequiredSafetyFactorStabilityOuterSlope = Convert.ToDouble(csv[colIndexSafetyFactorStabilityOuterSlope]); - // } - // if (colIndexUpliftCriterionPiping > -1) - // { - // colIndex = colIndexUpliftCriterionPiping; - // locationRecord.UpliftCriterionPiping = Convert.ToDouble(csv[colIndexUpliftCriterionPiping]); - // } - // if (colIndexUpliftCriterionStability > -1) - // { - // colIndex = colIndexUpliftCriterionStability; - // locationRecord.UpliftCriterionStability = Convert.ToDouble(csv[colIndexUpliftCriterionStability]); - // } - // if (colIndexDistanceToEntryPoint > -1) - // { - // colIndex = colIndexDistanceToEntryPoint; - // locationRecord.DistanceToEntryPoint = Convert.ToDouble(csv[colIndexDistanceToEntryPoint]); - // } - // if (colIndexPLLineOffsetBelowDikeTopAtRiver > -1) - // { - // colIndex = colIndexPLLineOffsetBelowDikeTopAtRiver; - // locationRecord.PlLineOffsetBelowDikeTopAtRiver = Convert.ToDouble(csv[colIndexPLLineOffsetBelowDikeTopAtRiver]); - // } - // if (colIndexPLLineOffsetBelowDikeTopAtPolder > -1) - // { - // colIndex = colIndexPLLineOffsetBelowDikeTopAtPolder; - // locationRecord.PlLineOffsetBelowDikeTopAtPolder = Convert.ToDouble(csv[colIndexPLLineOffsetBelowDikeTopAtPolder]); - // } - // if (colIndexPLLineOffsetBelowShoulderBaseInside > -1) - // { - // colIndex = colIndexPLLineOffsetBelowShoulderBaseInside; - // locationRecord.PlLineOffsetBelowShoulderBaseInside = Convert.ToDouble(csv[colIndexPLLineOffsetBelowShoulderBaseInside]); - // } - // if (colIndexPLLineOffsetBelowDikeToeAtPolder > -1) - // { - // colIndex = colIndexPLLineOffsetBelowDikeToeAtPolder; - // locationRecord.PlLineOffsetBelowDikeToeAtPolder = Convert.ToDouble(csv[colIndexPLLineOffsetBelowDikeToeAtPolder]); - // } - // if (colIndexPlLineOffsetBelowDikeCrestMiddle > -1) - // { - // colIndex = colIndexPlLineOffsetBelowDikeCrestMiddle; - // locationRecord.PlLineOffsetBelowDikeCrestMiddle = Convert.ToDouble(csv[colIndex]); - // } - // if (colIndexPlLineOffsetFactorBelowShoulderCrest > -1) - // { - // colIndex = colIndexPlLineOffsetFactorBelowShoulderCrest; - // locationRecord.PlLineOffsetFactorBelowShoulderCrest = Convert.ToDouble(csv[colIndex]); - // } - // if (colIndexUsePlLineOffsetBelowDikeCrestMiddle > -1) - // { - // colIndex = colIndexUsePlLineOffsetBelowDikeCrestMiddle; - // locationRecord.UsePlLineOffsetBelowDikeCrestMiddle = Convert.ToBoolean(csv[colIndex]); - // } - // if (colIndexUsePlLineOffsetFactorBelowShoulderCrest > -1) - // { - // colIndex = colIndexUsePlLineOffsetFactorBelowShoulderCrest; - // locationRecord.UsePlLineOffsetFactorBelowShoulderCrest = Convert.ToBoolean(csv[colIndex]); - // } - // if (colIndexIntrusionVerticalWaterPressure > -1) - // { - // colIndex = colIndexIntrusionVerticalWaterPressure; - // locationRecord.IntrusionVerticalWaterPressure = LocationImportHelper.ToIntrusionVerticalWaterPressure(csv[colIndex]); - // } - // if (colIndexStabilityShoulderGrowSlope > -1) - // { - // colIndex = colIndexStabilityShoulderGrowSlope; - // locationRecord.StabilityShoulderGrowSlope = Convert.ToDouble(csv[colIndexStabilityShoulderGrowSlope]); - // } - // if (colIndexStabilityShoulderGrowDeltaX > -1) - // { - // colIndex = colIndexStabilityShoulderGrowDeltaX; - // locationRecord.StabilityShoulderGrowDeltaX = Convert.ToDouble(csv[colIndexStabilityShoulderGrowDeltaX]); - // } - // 
if (colIndexStabilitySlopeAdaptionDeltaX > -1) - // { - // colIndex = colIndexStabilitySlopeAdaptionDeltaX; - // locationRecord.StabilitySlopeAdaptionDeltaX = Convert.ToDouble(csv[colIndexStabilitySlopeAdaptionDeltaX]); - // } - // if (colIndexStabilityDesignMethod > -1) - // { - // colIndex = colIndexStabilityDesignMethod; - // locationRecord.StabilityDesignMethod = LocationImportHelper.ToStabilityDesignMethod(csv[colIndexStabilityDesignMethod]); - // } - // if (colIndexStabilityZoneType > -1) - // { - // colIndex = colIndexStabilityZoneType; - // locationRecord.StabilityZoneType = LocationImportHelper.ToMStabZonesTypeMethod(csv[colIndexStabilityZoneType]); - // } - // if (colIndexForbiddenZoneFactor > -1) - // { - // colIndex = colIndexForbiddenZoneFactor; - // locationRecord.ForbiddenZoneFactor = Convert.ToDouble(csv[colIndexForbiddenZoneFactor]); - // } - // if (colIndexSlopeAdaptionStartCotangent > -1) - // { - // colIndex = colIndexSlopeAdaptionStartCotangent; - // locationRecord.SlopeAdaptionStartCotangent = Convert.ToDouble(csv[colIndexSlopeAdaptionStartCotangent]); - // } - // if (colIndexSlopeAdaptionEndCotangent > -1) - // { - // colIndex = colIndexSlopeAdaptionEndCotangent; - // locationRecord.SlopeAdaptionEndCotangent = Convert.ToDouble(csv[colIndexSlopeAdaptionEndCotangent]); - // } - // if (colIndexSlopeAdaptionStepCotangent > -1) - // { - // colIndex = colIndexSlopeAdaptionStepCotangent; - // locationRecord.SlopeAdaptionStepCotangent = Convert.ToDouble(csv[colIndexSlopeAdaptionStepCotangent]); - // } - // if (colIndexSlopeDampingPiezometricHeightPolderSide > -1) - // { - // colIndex = colIndexSlopeDampingPiezometricHeightPolderSide; - // locationRecord.SlopeDampingPiezometricHeightPolderSide = Convert.ToDouble(csv[colIndexSlopeDampingPiezometricHeightPolderSide]); - // } - // if (colIndexNewDikeTopWidth > -1) - // { - // colIndex = colIndexNewDikeTopWidth; - // locationRecord.NewDikeTopWidth = Convert.ToDouble(csv[colIndexNewDikeTopWidth]); - // } - // if (colIndexNewDikeSlopeInside > -1) - // { - // colIndex = colIndexNewDikeSlopeInside; - // locationRecord.NewDikeSlopeInside = Convert.ToDouble(csv[colIndexNewDikeSlopeInside]); - // } - // if (colIndexNewDikeSlopeOutside > -1) - // { - // colIndex = colIndexNewDikeSlopeOutside; - // locationRecord.NewDikeSlopeOutside = Convert.ToDouble(csv[colIndexNewDikeSlopeOutside]); - // } - // if (colIndexNewShoulderTopSlope > -1) - // { - // colIndex = colIndexNewShoulderTopSlope; - // locationRecord.NewShoulderTopSlope = Convert.ToDouble(csv[colIndexNewShoulderTopSlope]); - // } - // if (colIndexNewShoulderBaseSlope > -1) - // { - // colIndex = colIndexNewShoulderBaseSlope; - // locationRecord.NewShoulderBaseSlope = Convert.ToDouble(csv[colIndexNewShoulderBaseSlope]); - // } - // if (colIndexNewMaxHeightShoulderAsFraction > -1) - // { - // colIndex = colIndexNewMaxHeightShoulderAsFraction; - // locationRecord.NewMaxHeightShoulderAsFraction = Convert.ToDouble(csv[colIndexNewMaxHeightShoulderAsFraction]); - // } - // if (colIndexNewMinDistanceDikeToeStartDitch > -1) - // { - // colIndex = colIndexNewMinDistanceDikeToeStartDitch; - // locationRecord.NewMinDistanceDikeToeStartDitch = Convert.ToDouble(csv[colIndexNewMinDistanceDikeToeStartDitch]); - // } - // if (colIndexNewWidthDitchBottom > -1) - // { - // colIndex = colIndexNewWidthDitchBottom; - // locationRecord.NewWidthDitchBottom = Convert.ToDouble(csv[colIndexNewWidthDitchBottom]); - // } - // if (colIndexNewSlopeAngleDitch > -1) - // { - // colIndex = colIndexNewSlopeAngleDitch; 
- // locationRecord.NewSlopeAngleDitch = Convert.ToDouble(csv[colIndexNewSlopeAngleDitch]); - // } - // if (colIndexNewDepthDitch > -1) - // { - // colIndex = colIndexNewDepthDitch; - // locationRecord.NewDepthDitch = Convert.ToDouble(csv[colIndexNewDepthDitch]); - // } - // if (colIndexUseNewDikeTopWidth > -1) - // { - // colIndex = colIndexUseNewDikeTopWidth; - // locationRecord.UseNewDikeTopWidth = Convert.ToBoolean(csv[colIndexUseNewDikeTopWidth]); - // } - // if (colIndexUseNewDikeSlopeInside > -1) - // { - // colIndex = colIndexUseNewDikeSlopeInside; - // locationRecord.UseNewDikeSlopeInside = Convert.ToBoolean(csv[colIndexUseNewDikeSlopeInside]); - // } - // if (colIndexUseNewDikeSlopeOutside > -1) - // { - // colIndex = colIndexUseNewDikeSlopeOutside; - // locationRecord.UseNewDikeSlopeOutside = Convert.ToBoolean(csv[colIndexUseNewDikeSlopeOutside]); - // } - // if (colIndexUseNewShoulderTopSlope > -1) - // { - // colIndex = colIndexUseNewShoulderTopSlope; - // locationRecord.UseNewShoulderTopSlope = Convert.ToBoolean(csv[colIndexUseNewShoulderTopSlope]); - // } - // if (colIndexUseNewShoulderBaseSlope > -1) - // { - // colIndex = colIndexUseNewShoulderBaseSlope; - // locationRecord.UseNewShoulderBaseSlope = Convert.ToBoolean(csv[colIndexUseNewShoulderBaseSlope]); - // } - // if (colIndexUseNewMaxHeightShoulderAsFraction > -1) - // { - // colIndex = colIndexUseNewMaxHeightShoulderAsFraction; - // locationRecord.UseNewMaxHeightShoulderAsFraction = Convert.ToBoolean(csv[colIndexUseNewMaxHeightShoulderAsFraction]); - // } - // if (colIndexUseNewMinDistanceDikeToeStartDitch > -1) - // { - // colIndex = colIndexUseNewMinDistanceDikeToeStartDitch; - // locationRecord.UseNewMinDistanceDikeToeStartDitch = Convert.ToBoolean(csv[colIndexUseNewMinDistanceDikeToeStartDitch]); - // } - // if (colIndexUseNewDitchDefinition > -1) - // { - // colIndex = colIndexUseNewDitchDefinition; - // locationRecord.UseNewDitchDefinition = Convert.ToBoolean(csv[colIndexUseNewDitchDefinition]); - // } - // if (colIndexDikeTableHeight > -1) - // { - // colIndex = colIndexDikeTableHeight; - // locationRecord.DikeTableHeight = Convert.ToDouble(csv[colIndexDikeTableHeight]); - // } - // if (colIndexRiverLevel > -1) - // { - // colIndex = colIndexRiverLevel; - // locationRecord.RiverLevel = Convert.ToDouble(csv[colIndexRiverLevel]); - // } - // if (colIndexRiverLevelLow > -1) - // { - // colIndex = colIndexRiverLevelLow; - // locationRecord.RiverLevelLow = Convert.ToDouble(csv[colIndexRiverLevelLow]); - // } - // locationRecords.Add(locationRecord); - // } - // catch (Exception e) - // { - // var csvLocationError = String.Format("Tijdens het lezen van locatie { 0} in kolom { 1} uit de csv trad de volgende fout op: ", locationRecord.LocationId, colIndex + 1); - // errorMessages.Add(csvLocationError + e.Message); - // } - // } - // } + Thread.CurrentThread.CurrentCulture = CsvReaderUtilities.DetermineCultureForFile(fileName); + using (var csv = new CsvReader(new StreamReader(fileName), true, ';')) + { + string[] headers = CsvImporterHelper.GetFieldHeaders(this, csv); + if (headers.Count() < 3) + { + var csvHeaderError = "De header van de csv file klopt niet."; + throw new ArgumentException(string.Format("{0} : {1}", fileName, csvHeaderError)); + } + + int colIndexLocationId = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.LocationColumnName); + int colIndexSurfaceLineId = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.SurfaceLineColumnName); + int colIndexXSoilGeometry2DOrigin = 
CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.XSoilGeometry2DOriginColumnName); + int colIndexOphoogMateriaaldijk = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.DikeEmbankmentMaterialColumnName); + int colSoilGeometryName = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.SoilGeometryNameColumnName); + int colLayerMaterial = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.LayerMaterialColumnName); + int colLayerThickness = CsvReaderUtilities.GetHeaderIndexByString(headers, CsvColumnNames.LayerThicknessColumnName); + + var index = 1; + while (csv.ReadNextRecord()) + { + var locationRecord = new LocationRecord(); + var colIndex = -1; // Keep track of column for error message + try + { + locationRecord.LocationRecordId = index++; + + /* + * Required columns + */ + locationRecord.LocationId = csv[colIndexLocationId]; + locationRecord.SurfaceLineId = csv[colIndexSurfaceLineId]; + locationRecord.DikeEmbankmentMaterial = csv[colIndexOphoogMateriaaldijk]; + locationRecord.SoilGeometryName = csv[colSoilGeometryName]; + locationRecord.LayerMaterial = csv[colLayerMaterial]; + locationRecord.LayerThickness = Convert.ToDouble(csv[colLayerThickness]); + + /* + * Optional column + */ + if (colIndexXSoilGeometry2DOrigin > -1) + { + colIndex = colIndexXSoilGeometry2DOrigin; + locationRecord.XSoilGeometry2DOrigin = Convert.ToDouble(csv[colIndexXSoilGeometry2DOrigin]); + } + locationRecords.Add(locationRecord); + } + catch (Exception e) + { + var csvLocationError = String.Format("Tijdens het lezen van locatie {0} in kolom {1} uit de csv trad de volgende fout op: ", + locationRecord.LocationId, colIndex + 1); + errorMessages.Add(csvLocationError + e.Message); + } + } + } } finally { Thread.CurrentThread.CurrentCulture = oldcur; } } + + /// + /// Gets the imported items. + /// + /// + /// The imported items. + /// public List ImportedItems { get { return locationRecords; } } + /// + /// Gets the error messages. + /// + /// + /// The error messages. + /// public List ErrorMessages { get { return errorMessages; } - set { errorMessages = value; } } } } Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/MissingFieldAction.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/MissingFieldAction.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/MissingFieldAction.cs (revision 3110) @@ -0,0 +1,44 @@ +// LumenWorks.Framework.IO.CSV.MissingFieldAction +// Copyright (c) 2006 S�bastien Lorion +// +// MIT license (http://en.wikipedia.org/wiki/MIT_License) +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do so, +// subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +// PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +namespace LumenWorks.Framework.IO.Csv +{ + /// + /// Specifies the action to take when a field is missing. + /// + public enum MissingFieldAction + { + /// + /// Treat as a parsing error. + /// + ParseError = 0, + + /// + /// Replaces by an empty value. + /// + ReplaceByEmpty = 1, + + /// + /// Replaces by a null value (). + /// + ReplaceByNull = 2, + } +} \ No newline at end of file Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/ValueTrimmingOptions.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/ValueTrimmingOptions.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/ValueTrimmingOptions.cs (revision 3110) @@ -0,0 +1,13 @@ +using System; + +namespace LumenWorks.Framework.IO.Csv +{ + [Flags] + public enum ValueTrimmingOptions + { + None = 0, + UnquotedOnly = 1, + QuotedOnly = 2, + All = UnquotedOnly | QuotedOnly + } +} \ No newline at end of file Index: DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvReaderUtilities.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvReaderUtilities.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvReaderUtilities.cs (revision 3110) @@ -0,0 +1,170 @@ +// Copyright (C) Stichting Deltares 2020. All rights reserved. +// +// This file is part of the LayerOnSlopeTool +// +// The LayerOnSlopeTool is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . +// +// All names, logos, and references to "Deltares" are registered trademarks of +// Stichting Deltares and remain full property of Stichting Deltares at all times. +// All rights reserved. + +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Linq; + +namespace Deltares.LayerOnSlopeTool.Data.CsvImporters +{ + /// + /// Holds the utility functions for reading csv files. + /// + public class CsvReaderUtilities + { + /// + /// Ensures the proper header for dynamic record length in CSV file. + /// + /// Name of the file. 
+ public static void EnsureProperHeaderForDynamicRecordLengthInCsvFile(string fileName) + { + if (!File.Exists(fileName)) + { + return; + } + + var maxFields = 0; + var fileContents = new List(); + + using (var sr = new StreamReader(fileName)) + { + String input; + while ((input = sr.ReadLine()) != null) + { + fileContents.Add(input); + string[] items = input.Split(';'); + + maxFields = Math.Max(maxFields, items.Count()); + } + } + + if (maxFields > 0) + { + var header = fileContents[0]; + string[] items = header.Split(';'); + for (var i = items.Count(); i < maxFields; i++) + { + header += string.Format("A{0};", i); + } + fileContents[0] = header; + } + + using (var outfile = new StreamWriter(fileName)) + { + foreach (var line in fileContents) + { + outfile.WriteLine(line); + } + } + } + + /// + /// Determines the culture for the file, based on whether decimal values use a point or a comma. + /// + /// Name of the file. + /// + public static CultureInfo DetermineCultureForFile(string fileName) + { + if (!File.Exists(fileName)) + { + return null; + } + + var fileContents = new List(); + bool isDetermined = false; + CultureInfo cultureInfo = null; + using (var sr = new StreamReader(fileName)) + { + String input; + while (((input = sr.ReadLine()) != null) && (!isDetermined)) + { + fileContents.Add(input); + string[] items = input.Split(';'); + foreach (var item in items) + { + try + { + if (item.Contains('.')) + { + var pi = item.IndexOf('.'); + // check for only one point + if (pi == item.LastIndexOf('.')) + { + // point must be followed by a number + if (Char.IsDigit(item[pi + 1])) + { + cultureInfo = new CultureInfo(CultureInfo.InvariantCulture.LCID, false); + isDetermined = true; + break; + } + } + } + else + { + if (item.Contains(',')) + { + var ci = item.IndexOf(','); + // check for only one comma + if (ci == item.LastIndexOf(',')) + { + // comma must be followed by a number + if (Char.IsDigit(item[ci + 1])) + { + cultureInfo = new CultureInfo("nl-NL", false); + isDetermined = true; + break; + } + } + } + } + } + catch (Exception) + { + // Intentionally empty: items that cannot be inspected are skipped. + } + } + } + } + var returnCulture = cultureInfo ?? new CultureInfo("en-US", false); + return returnCulture; + } + + /// + /// Match a column name to the header of the csv file and get the corresponding index. + /// + /// All headers as appearing in the csv file. + /// Header to retrieve the index for. + /// Index of the first matching item in the headers; -1 if no match is found. + public static int GetHeaderIndexByString(string[] headers, string headerName) + { + for (int i = 0; i < headers.Count(); i++) + { + if (headerName.ToUpper() == headers[i].ToUpper()) + { + return i; + } + } + return -1; + } + } +} Index: DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/SurfaceLinePoint.cs =================================================================== diff -u -r3102 -r3110 --- DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/SurfaceLinePoint.cs (.../SurfaceLinePoint.cs) (revision 3102) +++ DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/SurfaceLinePoint.cs (.../SurfaceLinePoint.cs) (revision 3110) @@ -1,8 +1,8 @@ // Copyright (C) Stichting Deltares 2020. All rights reserved. // -// This file is part of the Dam Engine.
+// This file is part of the LayerOnSlopeTool // -// The Dam Engine is free software: you can redistribute it and/or modify +// The LayerOnSlopeTool is free software: you can redistribute it and/or modify // it under the terms of the GNU Affero General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. Index: DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvImporterCharacteristicPoints.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvImporterCharacteristicPoints.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvImporterCharacteristicPoints.cs (revision 3110) @@ -0,0 +1,509 @@ +// Copyright (C) Stichting Deltares 2020. All rights reserved. +// +// This file is part of the LayerOnSlopeTool +// +// The LayerOnSlopeTool is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . +// +// All names, logos, and references to "Deltares" are registered trademarks of +// Stichting Deltares and remain full property of Stichting Deltares at all times. +// All rights reserved. 
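Before the characteristic-points importer below, a minimal usage sketch for the CsvReaderUtilities helpers introduced above. This is a sketch only: the file path is hypothetical, the column name "LocationId" is the constant used by the surface-line importer later in this revision, and the File.Exists guard is there because DetermineCultureForFile returns null for a missing file.

using System;
using System.Globalization;
using System.IO;
using System.Threading;
using Deltares.LayerOnSlopeTool.Data.CsvImporters;

internal static class CsvReaderUtilitiesUsageSketch
{
    internal static void Run(string fileName) // e.g. a semicolon-delimited locations csv (hypothetical)
    {
        if (!File.Exists(fileName))
        {
            return; // DetermineCultureForFile would return null for a missing file
        }

        // Pick the culture that matches the decimal separator used in the file,
        // so that Convert.ToDouble parses "1.5" (en-US) and "1,5" (nl-NL) consistently.
        CultureInfo detected = CsvReaderUtilities.DetermineCultureForFile(fileName);
        CultureInfo previous = Thread.CurrentThread.CurrentCulture;
        try
        {
            Thread.CurrentThread.CurrentCulture = detected;

            using (var reader = new StreamReader(fileName))
            {
                // Case-insensitive header lookup; -1 means the column is absent.
                string[] headers = (reader.ReadLine() ?? string.Empty).Split(';');
                int locationCol = CsvReaderUtilities.GetHeaderIndexByString(headers, "LocationId");
                Console.WriteLine(locationCol >= 0 ? "LocationId column found" : "LocationId column missing");
            }
        }
        finally
        {
            Thread.CurrentThread.CurrentCulture = previous;
        }
    }
}

Setting the thread culture around the read mirrors what the importers in this revision do in their constructors.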
+ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading; +using LumenWorks.Framework.IO.Csv; + +namespace Deltares.LayerOnSlopeTool.Data.CsvImporters +{ + /// + /// Holds the importer for the characteristic points from csv files + /// + public class CsvImporterCharacteristicPoints + { + private List characteristicPointsRecords = new List(); + private List errorMessages = new List(); + + public class CharPoint + { + public CharacteristicPointType Type { get; set; } + public double X { get; set; } + public double Y { get; set; } + public double Z { get; set; } + } + + public class CharacteristicPointsRecord + { + private IList points = new List(); + public string SurfaceLineId { get; set; } + + public IList Points + { + get { return points; } + set { points = value; } + } + + public int Volgnummer { get; set; } + } + + private void CheckColumn(int index, string fileName, string fieldName) + { + if (index < 0) + { + var csvHeaderFieldError = "In de header ontbreekt veld:"; + throw new ArgumentException(string.Format("{0} : {1} {2}", fileName, csvHeaderFieldError, fieldName)); + } + } + + public CsvImporterCharacteristicPoints(string fileName) + { + const int minimalNumberOfColumns = 49; // Column 53 "volgnummer" is not mandatory; DTH removed + errorMessages.Clear(); + if (fileName == "") + { + throw new ArgumentException("De filenaam voor de CharacteristicPoints csv is leeg."); + } + + if (!File.Exists(fileName)) + { + throw new ArgumentException(string.Format("De CharacteristicPoints csv file met naam {0} bestaat niet.", fileName)); + } + + var oldcur = Thread.CurrentThread.CurrentCulture; + try + { + Thread.CurrentThread.CurrentCulture = CsvReaderUtilities.DetermineCultureForFile(fileName); + using (CsvReader csv = new CsvReader(new StreamReader(fileName), true, ';')) + { + string[] headers = CsvImporterHelper.GetFieldHeaders(this, csv); + if (headers.Count() < minimalNumberOfColumns) + { + var csvHeaderColumnCountError = string.Format("Miminum aantal kolommen in csv bestand is {0}", minimalNumberOfColumns); + throw new ArgumentException(string.Format("{0} : {1}", fileName, csvHeaderColumnCountError)); + } + + int surfaceLineIdCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.LocationId); + if (surfaceLineIdCol < 0) + { + // colIndexSurfaceLineId can be defined with 2 identifiers (surfaceLineId is deprecated, locationId is the new one) + surfaceLineIdCol = CsvReaderUtilities.GetHeaderIndexByString(headers, + CharacteristicPointCsvIdentifiers.SurfaceLineId); + } + CheckColumn(surfaceLineIdCol, fileName, CharacteristicPointCsvIdentifiers.SurfaceLineId); + + int xMaaiveldBinnenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, + CharacteristicPointCsvIdentifiers + .SurfaceLevelInsideX); + CheckColumn(xMaaiveldBinnenwaartsCol, fileName, + CharacteristicPointCsvIdentifiers.SurfaceLevelInsideX); + + int yMaaiveldBinnenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, + CharacteristicPointCsvIdentifiers + .SurfaceLevelInsideY); + CheckColumn(yMaaiveldBinnenwaartsCol, fileName, + CharacteristicPointCsvIdentifiers.SurfaceLevelInsideY); + + int zMaaiveldBinnenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, + CharacteristicPointCsvIdentifiers + .SurfaceLevelInsideZ); + CheckColumn(zMaaiveldBinnenwaartsCol, fileName, + CharacteristicPointCsvIdentifiers.SurfaceLevelInsideZ); + + int xInsteekSlootPolderzijdeCol = CsvReaderUtilities.GetHeaderIndexByString(headers, + 
CharacteristicPointCsvIdentifiers + .DitchPolderSideX); + CheckColumn(xInsteekSlootPolderzijdeCol, fileName, + CharacteristicPointCsvIdentifiers.DitchPolderSideX); + + int yInsteekSlootPolderzijdeCol = CsvReaderUtilities.GetHeaderIndexByString(headers, + CharacteristicPointCsvIdentifiers + .DitchPolderSideY); + CheckColumn(yInsteekSlootPolderzijdeCol, fileName, + CharacteristicPointCsvIdentifiers.DitchPolderSideY); + + int zInsteekSlootPolderzijdeCol = CsvReaderUtilities.GetHeaderIndexByString(headers, + CharacteristicPointCsvIdentifiers + .DitchPolderSideZ); + CheckColumn(zInsteekSlootPolderzijdeCol, fileName, + CharacteristicPointCsvIdentifiers.DitchPolderSideZ); + + int xSlootbodemPolderzijdeCol = CsvReaderUtilities.GetHeaderIndexByString(headers, + CharacteristicPointCsvIdentifiers + .BottomDitchPolderSideX); + CheckColumn(xSlootbodemPolderzijdeCol, fileName, + CharacteristicPointCsvIdentifiers.BottomDitchPolderSideX); + + int ySlootbodemPolderzijdeCol = CsvReaderUtilities.GetHeaderIndexByString(headers, + CharacteristicPointCsvIdentifiers + .BottomDitchPolderSideY); + CheckColumn(ySlootbodemPolderzijdeCol, fileName, + CharacteristicPointCsvIdentifiers.BottomDitchPolderSideY); + + int zSlootbodemPolderzijdeCol = CsvReaderUtilities.GetHeaderIndexByString(headers, + CharacteristicPointCsvIdentifiers + .BottomDitchPolderSideZ); + CheckColumn(zSlootbodemPolderzijdeCol, fileName, + CharacteristicPointCsvIdentifiers.BottomDitchPolderSideZ); + + int xSlootbodemDijkzijdeCol = CsvReaderUtilities.GetHeaderIndexByString(headers, + CharacteristicPointCsvIdentifiers + .BottomDitchDikeSideX); + CheckColumn(xSlootbodemDijkzijdeCol, fileName, + CharacteristicPointCsvIdentifiers.BottomDitchDikeSideX); + + int ySlootbodemDijkzijdeCol = CsvReaderUtilities.GetHeaderIndexByString(headers, + CharacteristicPointCsvIdentifiers + .BottomDitchDikeSideY); + CheckColumn(ySlootbodemDijkzijdeCol, fileName, + CharacteristicPointCsvIdentifiers.BottomDitchDikeSideY); + + int zSlootbodemDijkzijdeCol = CsvReaderUtilities.GetHeaderIndexByString(headers, + CharacteristicPointCsvIdentifiers + .BottomDitchDikeSideZ); + CheckColumn(zSlootbodemDijkzijdeCol, fileName, + CharacteristicPointCsvIdentifiers.BottomDitchDikeSideZ); + + int xInsteekSlootDijkzijdeCol = CsvReaderUtilities.GetHeaderIndexByString(headers, + CharacteristicPointCsvIdentifiers + .DitchDikeSideX); + CheckColumn(xInsteekSlootDijkzijdeCol, fileName, CharacteristicPointCsvIdentifiers.DitchDikeSideX); + + int yInsteekSlootDijkzijdeCol = CsvReaderUtilities.GetHeaderIndexByString(headers, + CharacteristicPointCsvIdentifiers + .DitchDikeSideY); + // The click program by Erik Vastenburg now delivers "Y_Insteek_sloot dijkzijde" instead of the correct above definition + // Erik will change this in his program but for now, this wrong tag should also be recognized. 
+ if (yInsteekSlootDijkzijdeCol < 0) + { + const string fieldYInsteekslootdijkzijde = "Y_Insteek_sloot dijkzijde"; + yInsteekSlootDijkzijdeCol = CsvReaderUtilities.GetHeaderIndexByString(headers, + fieldYInsteekslootdijkzijde); + CheckColumn(yInsteekSlootDijkzijdeCol, fileName, fieldYInsteekslootdijkzijde); + } + else + { + CheckColumn(yInsteekSlootDijkzijdeCol, fileName, + CharacteristicPointCsvIdentifiers.DitchDikeSideY); + } + + int zInsteekSlootDijkzijdeCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.DitchDikeSideZ); + CheckColumn(zInsteekSlootDijkzijdeCol, fileName, CharacteristicPointCsvIdentifiers.DitchDikeSideZ); + + int xTeenDijkBinnenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.DikeToeAtPolderX); + CheckColumn(xTeenDijkBinnenwaartsCol, fileName, CharacteristicPointCsvIdentifiers.DikeToeAtPolderX); + + int yTeenDijkBinnenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.DikeToeAtPolderY); + CheckColumn(yTeenDijkBinnenwaartsCol, fileName, CharacteristicPointCsvIdentifiers.DikeToeAtPolderY); + + int zTeenDijkBinnenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.DikeToeAtPolderZ); + CheckColumn(zTeenDijkBinnenwaartsCol, fileName, CharacteristicPointCsvIdentifiers.DikeToeAtPolderZ); + + int xKruinBinnenbermCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.TopShoulderInsideX); + CheckColumn(xKruinBinnenbermCol, fileName, CharacteristicPointCsvIdentifiers.TopShoulderInsideX); + + int yKruinBinnenbermCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.TopShoulderInsideY); + CheckColumn(yKruinBinnenbermCol, fileName, CharacteristicPointCsvIdentifiers.TopShoulderInsideY); + + int zKruinBinnenbermCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.TopShoulderInsideZ); + CheckColumn(zKruinBinnenbermCol, fileName, CharacteristicPointCsvIdentifiers.TopShoulderInsideZ); + + int xInsteekBinnenbermCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.InsteekShoulderInsideX); + CheckColumn(xInsteekBinnenbermCol, fileName, CharacteristicPointCsvIdentifiers.InsteekShoulderInsideX); + + int yInsteekBinnenbermCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.InsteekShoulderInsideY); + CheckColumn(yInsteekBinnenbermCol, fileName, CharacteristicPointCsvIdentifiers.InsteekShoulderInsideY); + + int zInsteekBinnenbermCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.InsteekShoulderInsideZ); + CheckColumn(zInsteekBinnenbermCol, fileName, CharacteristicPointCsvIdentifiers.InsteekShoulderInsideZ); + + int xKruinBinnentaludCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.DikeTopAtPolderX); + CheckColumn(xKruinBinnentaludCol, fileName, CharacteristicPointCsvIdentifiers.DikeTopAtPolderX); + + int yKruinBinnentaludCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.DikeTopAtPolderY); + CheckColumn(yKruinBinnentaludCol, fileName, CharacteristicPointCsvIdentifiers.DikeTopAtPolderY); + + int zKruinBinnentaludCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.DikeTopAtPolderZ); + CheckColumn(zKruinBinnentaludCol, fileName, CharacteristicPointCsvIdentifiers.DikeTopAtPolderZ); + 
+ int xVerkeersbelastingKantBinnenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.TrafficLoadInsideX); + CheckColumn(xVerkeersbelastingKantBinnenwaartsCol, fileName, CharacteristicPointCsvIdentifiers.TrafficLoadInsideX); + + int yVerkeersbelastingKantBinnenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.TrafficLoadInsideY); + CheckColumn(yVerkeersbelastingKantBinnenwaartsCol, fileName, + CharacteristicPointCsvIdentifiers.TrafficLoadInsideY); + + int zVerkeersbelastingKantBinnenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.TrafficLoadInsideZ); + CheckColumn(zVerkeersbelastingKantBinnenwaartsCol, fileName, CharacteristicPointCsvIdentifiers.TrafficLoadInsideZ); + + int xVerkeersbelastingKantBuitenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.TrafficLoadOutsideX); + CheckColumn(xVerkeersbelastingKantBuitenwaartsCol, fileName, CharacteristicPointCsvIdentifiers.TrafficLoadOutsideX); + + int yVerkeersbelastingKantBuitenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.TrafficLoadOutsideY); + CheckColumn(yVerkeersbelastingKantBuitenwaartsCol, fileName, CharacteristicPointCsvIdentifiers.TrafficLoadOutsideY); + + int zVerkeersbelastingKantBuitenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.TrafficLoadOutsideZ); + CheckColumn(zVerkeersbelastingKantBuitenwaartsCol, fileName, CharacteristicPointCsvIdentifiers.TrafficLoadOutsideZ); + + int xKruinBuitentaludCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.DikeTopAtRiverX); + CheckColumn(xKruinBuitentaludCol, fileName, CharacteristicPointCsvIdentifiers.DikeTopAtRiverX); + + int yKruinBuitentaludCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.DikeTopAtRiverY); + CheckColumn(yKruinBuitentaludCol, fileName, CharacteristicPointCsvIdentifiers.DikeTopAtRiverY); + + int zKruinBuitentaludCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.DikeTopAtRiverZ); + CheckColumn(zKruinBuitentaludCol, fileName, CharacteristicPointCsvIdentifiers.DikeTopAtRiverZ); + + int xInsteekBuitenbermCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.InsteekShoulderOutsideX); + CheckColumn(xInsteekBuitenbermCol, fileName, CharacteristicPointCsvIdentifiers.InsteekShoulderOutsideX); + + int yInsteekBuitenbermCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.InsteekShoulderOutsideY); + CheckColumn(yInsteekBuitenbermCol, fileName, CharacteristicPointCsvIdentifiers.InsteekShoulderOutsideY); + + int zInsteekBuitenbermCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.InsteekShoulderOutsideZ); + CheckColumn(zInsteekBuitenbermCol, fileName, CharacteristicPointCsvIdentifiers.InsteekShoulderOutsideZ); + + int xKruinBuitenbermCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.TopShoulderOutsideX); + CheckColumn(xKruinBuitenbermCol, fileName, CharacteristicPointCsvIdentifiers.TopShoulderOutsideX); + + int yKruinBuitenbermCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.TopShoulderOutsideY); + CheckColumn(yKruinBuitenbermCol, fileName, 
CharacteristicPointCsvIdentifiers.TopShoulderOutsideY); + + int zKruinBuitenbermCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.TopShoulderOutsideZ); + CheckColumn(zKruinBuitenbermCol, fileName, CharacteristicPointCsvIdentifiers.TopShoulderOutsideZ); + + int xTeenDijkBuitenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.DikeToeAtRiverX); + CheckColumn(xTeenDijkBuitenwaartsCol, fileName, CharacteristicPointCsvIdentifiers.DikeToeAtRiverX); + + int yTeenDijkBuitenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.DikeToeAtRiverY); + CheckColumn(yTeenDijkBuitenwaartsCol, fileName, CharacteristicPointCsvIdentifiers.DikeToeAtRiverY); + + int zTeenDijkBuitenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.DikeToeAtRiverZ); + CheckColumn(zTeenDijkBuitenwaartsCol, fileName, CharacteristicPointCsvIdentifiers.DikeToeAtRiverZ); + + int xMaaiveldBuitenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.SurfaceLevelOutsideX); + CheckColumn(xMaaiveldBuitenwaartsCol, fileName, + CharacteristicPointCsvIdentifiers.SurfaceLevelOutsideX); + + int yMaaiveldBuitenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.SurfaceLevelOutsideY); + CheckColumn(yMaaiveldBuitenwaartsCol, fileName, + CharacteristicPointCsvIdentifiers.SurfaceLevelOutsideY); + + int zMaaiveldBuitenwaartsCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.SurfaceLevelOutsideZ); + CheckColumn(zMaaiveldBuitenwaartsCol, fileName, CharacteristicPointCsvIdentifiers.SurfaceLevelOutsideZ); + + //optional, do not check + int xInsteekGeulCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.InsertRiverChannelX); + //optional, do not check + int yInsteekGeulCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.InsertRiverChannelY); + //optional, do not check + int zInsteekGeulCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.InsertRiverChannelZ); + + //optional, do not check + int xTeenGeulCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.BottomRiverChannelX); + //optional, do not check + int yTeenGeulCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.BottomRiverChannelY); + //optional, do not check + int zTeenGeulCol = CsvReaderUtilities.GetHeaderIndexByString(headers, CharacteristicPointCsvIdentifiers.BottomRiverChannelZ); + + // optional so do not check + const string fieldVolgnummer = "Volgnummer"; + int volgnummerCol = CsvReaderUtilities.GetHeaderIndexByString(headers, fieldVolgnummer); + + + while (csv.ReadNextRecord()) + { + try + { + CharacteristicPointsRecord characteristicPoints = new CharacteristicPointsRecord(); + characteristicPoints.SurfaceLineId = csv[surfaceLineIdCol]; + if (volgnummerCol > -1) + { + characteristicPoints.Volgnummer = Convert.ToInt32(csv[volgnummerCol]); + } + + CharPoint mbi = new CharPoint(); + mbi.Type = CharacteristicPointType.SurfaceLevelInside; + mbi.X = Convert.ToDouble(csv[xMaaiveldBinnenwaartsCol]); + mbi.Y = Convert.ToDouble(csv[yMaaiveldBinnenwaartsCol]); + mbi.Z = Convert.ToDouble(csv[zMaaiveldBinnenwaartsCol]); + characteristicPoints.Points.Add(mbi); + + CharPoint isp = new CharPoint(); + isp.Type = 
CharacteristicPointType.DitchPolderSide; + isp.X = Convert.ToDouble(csv[xInsteekSlootPolderzijdeCol]); + isp.Y = Convert.ToDouble(csv[yInsteekSlootPolderzijdeCol]); + isp.Z = Convert.ToDouble(csv[zInsteekSlootPolderzijdeCol]); + characteristicPoints.Points.Add(isp); + + CharPoint sbp = new CharPoint(); + sbp.Type = CharacteristicPointType.BottomDitchPolderSide; + sbp.X = Convert.ToDouble(csv[xSlootbodemPolderzijdeCol]); + sbp.Y = Convert.ToDouble(csv[ySlootbodemPolderzijdeCol]); + sbp.Z = Convert.ToDouble(csv[zSlootbodemPolderzijdeCol]); + characteristicPoints.Points.Add(sbp); + + CharPoint sbd = new CharPoint(); + sbd.Type = CharacteristicPointType.BottomDitchDikeSide; + sbd.X = Convert.ToDouble(csv[xSlootbodemDijkzijdeCol]); + sbd.Y = Convert.ToDouble(csv[ySlootbodemDijkzijdeCol]); + sbd.Z = Convert.ToDouble(csv[zSlootbodemDijkzijdeCol]); + characteristicPoints.Points.Add(sbd); + + CharPoint isd = new CharPoint(); + isd.Type = CharacteristicPointType.DitchDikeSide; + isd.X = Convert.ToDouble(csv[xInsteekSlootDijkzijdeCol]); + isd.Y = Convert.ToDouble(csv[yInsteekSlootDijkzijdeCol]); + isd.Z = Convert.ToDouble(csv[zInsteekSlootDijkzijdeCol]); + characteristicPoints.Points.Add(isd); + + CharPoint tdbi = new CharPoint(); + tdbi.Type = CharacteristicPointType.DikeToeAtPolder; + tdbi.X = Convert.ToDouble(csv[xTeenDijkBinnenwaartsCol]); + tdbi.Y = Convert.ToDouble(csv[yTeenDijkBinnenwaartsCol]); + tdbi.Z = Convert.ToDouble(csv[zTeenDijkBinnenwaartsCol]); + characteristicPoints.Points.Add(tdbi); + + CharPoint kbb = new CharPoint(); + kbb.Type = CharacteristicPointType.ShoulderTopInside; + kbb.X = Convert.ToDouble(csv[xKruinBinnenbermCol]); + kbb.Y = Convert.ToDouble(csv[yKruinBinnenbermCol]); + kbb.Z = Convert.ToDouble(csv[zKruinBinnenbermCol]); + characteristicPoints.Points.Add(kbb); + + CharPoint ibb = new CharPoint(); + ibb.Type = CharacteristicPointType.ShoulderBaseInside; + ibb.X = Convert.ToDouble(csv[xInsteekBinnenbermCol]); + ibb.Y = Convert.ToDouble(csv[yInsteekBinnenbermCol]); + ibb.Z = Convert.ToDouble(csv[zInsteekBinnenbermCol]); + characteristicPoints.Points.Add(ibb); + + CharPoint kbt = new CharPoint(); + kbt.Type = CharacteristicPointType.DikeTopAtPolder; + kbt.X = Convert.ToDouble(csv[xKruinBinnentaludCol]); + kbt.Y = Convert.ToDouble(csv[yKruinBinnentaludCol]); + kbt.Z = Convert.ToDouble(csv[zKruinBinnentaludCol]); + characteristicPoints.Points.Add(kbt); + + CharPoint vbbi = new CharPoint(); + vbbi.Type = CharacteristicPointType.TrafficLoadInside; + vbbi.X = Convert.ToDouble(csv[xVerkeersbelastingKantBinnenwaartsCol]); + vbbi.Y = Convert.ToDouble(csv[yVerkeersbelastingKantBinnenwaartsCol]); + vbbi.Z = Convert.ToDouble(csv[zVerkeersbelastingKantBinnenwaartsCol]); + characteristicPoints.Points.Add(vbbi); + + CharPoint vbbu = new CharPoint(); + vbbu.Type = CharacteristicPointType.TrafficLoadOutside; + vbbu.X = Convert.ToDouble(csv[xVerkeersbelastingKantBuitenwaartsCol]); + vbbu.Y = Convert.ToDouble(csv[yVerkeersbelastingKantBuitenwaartsCol]); + vbbu.Z = Convert.ToDouble(csv[zVerkeersbelastingKantBuitenwaartsCol]); + characteristicPoints.Points.Add(vbbu); + + CharPoint kbut = new CharPoint(); + kbut.Type = CharacteristicPointType.DikeTopAtRiver; + kbut.X = Convert.ToDouble(csv[xKruinBuitentaludCol]); + kbut.Y = Convert.ToDouble(csv[yKruinBuitentaludCol]); + kbut.Z = Convert.ToDouble(csv[zKruinBuitentaludCol]); + characteristicPoints.Points.Add(kbut); + + CharPoint ibub = new CharPoint(); + ibub.Type = CharacteristicPointType.ShoulderBaseOutside; + ibub.X = 
Convert.ToDouble(csv[xInsteekBuitenbermCol]); + ibub.Y = Convert.ToDouble(csv[yInsteekBuitenbermCol]); + ibub.Z = Convert.ToDouble(csv[zInsteekBuitenbermCol]); + characteristicPoints.Points.Add(ibub); + + CharPoint kbub = new CharPoint(); + kbub.Type = CharacteristicPointType.ShoulderTopOutside; + kbub.X = Convert.ToDouble(csv[xKruinBuitenbermCol]); + kbub.Y = Convert.ToDouble(csv[yKruinBuitenbermCol]); + kbub.Z = Convert.ToDouble(csv[zKruinBuitenbermCol]); + characteristicPoints.Points.Add(kbub); + + CharPoint tdbu = new CharPoint(); + tdbu.Type = CharacteristicPointType.DikeToeAtRiver; + tdbu.X = Convert.ToDouble(csv[xTeenDijkBuitenwaartsCol]); + tdbu.Y = Convert.ToDouble(csv[yTeenDijkBuitenwaartsCol]); + tdbu.Z = Convert.ToDouble(csv[zTeenDijkBuitenwaartsCol]); + characteristicPoints.Points.Add(tdbu); + + CharPoint mbu = new CharPoint(); + mbu.Type = CharacteristicPointType.SurfaceLevelOutside; + mbu.X = Convert.ToDouble(csv[xMaaiveldBuitenwaartsCol]); + mbu.Y = Convert.ToDouble(csv[yMaaiveldBuitenwaartsCol]); + mbu.Z = Convert.ToDouble(csv[zMaaiveldBuitenwaartsCol]); + characteristicPoints.Points.Add(mbu); + + if (xTeenGeulCol > 0) + { + CharPoint brc = new CharPoint(); + brc.Type = CharacteristicPointType.BottomRiverChannel; + brc.X = Convert.ToDouble(csv[xTeenGeulCol]); + brc.Y = Convert.ToDouble(csv[yTeenGeulCol]); + brc.Z = Convert.ToDouble(csv[zTeenGeulCol]); + characteristicPoints.Points.Add(brc); + } + + if (xInsteekGeulCol > 0) + { + CharPoint irc = new CharPoint(); + irc.Type = CharacteristicPointType.InsertRiverChannel; + irc.X = Convert.ToDouble(csv[xInsteekGeulCol]); + irc.Y = Convert.ToDouble(csv[yInsteekGeulCol]); + irc.Z = Convert.ToDouble(csv[zInsteekGeulCol]); + characteristicPoints.Points.Add(irc); + } + + characteristicPointsRecords.Add(characteristicPoints); + } + catch (Exception e) + { + var csvCharacteristicPointError = + "Tijdens het lezen van karakteristieke punten in locatie {0} in kolom {1} uit de csv trad de volgende fout op: "; + errorMessages.Add(csvCharacteristicPointError + e.Message); + } + } + } + } + finally + { + Thread.CurrentThread.CurrentCulture = oldcur; + } + } + + /// + /// Gets the imported items. + /// + /// + /// The imported items. + /// + public List ImportedItems + { + get { return characteristicPointsRecords; } + } + + /// + /// Gets the error messages. + /// + /// + /// The error messages. + /// + public List ErrorMessages + { + get { return errorMessages; } + set { errorMessages = value; } + } + + } +} Index: DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvImporterHelper.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvImporterHelper.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvImporterHelper.cs (revision 3110) @@ -0,0 +1,51 @@ +// Copyright (C) Stichting Deltares 2020. All rights reserved. +// +// This file is part of the LayerOnSlopeTool +// +// The LayerOnSlopeTool is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . +// +// All names, logos, and references to "Deltares" are registered trademarks of +// Stichting Deltares and remain full property of Stichting Deltares at all times. +// All rights reserved. + +using System; +using LumenWorks.Framework.IO.Csv; + +namespace Deltares.LayerOnSlopeTool.Data.CsvImporters +{ + /// + /// Holds the helper functions for reading csv files. + /// + public class CsvImporterHelper + { + /// + /// Read field headers of csv file with improved error messages + /// + /// the calling object + /// the scv reader + /// + static public string[] GetFieldHeaders(Object caller, CsvReader csvReader) + { + try + { + return csvReader.GetFieldHeaders(); + } + catch (Exception exception) + { + string message = string.Format("Fout bij het lezen van de CSV kolom namen; controleer op dubbele namen ('{0}' exception in {1})", exception.Message, caller.GetType()); + throw new ArgumentException(message); + } + } + } +} Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/LumenWorks.Framework.IO.csproj =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/LumenWorks.Framework.IO.csproj (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/LumenWorks.Framework.IO.csproj (revision 3110) @@ -0,0 +1,87 @@ + + + + Debug + AnyCPU + 9.0.30729 + 2.0 + {E3B83D68-C90C-4874-8E5F-DEC8D038BC7C} + Library + Properties + LumenWorks.Framework.IO + LumenWorks.Framework.IO + v4.5 + 512 + + + + + + + + + true + LumenWorks.Framework.snk + + + 3.5 + + + + + true + ..\..\bin\Debug\ + DEBUG;TRACE + full + x86 + prompt + false + + + ..\..\bin\Release\ + TRACE + bin\Release\LumenWorks.Framework.IO.XML + true + pdbonly + x86 + prompt + 1 + false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CachedCsvReader.CsvBindingList.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CachedCsvReader.CsvBindingList.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CachedCsvReader.CsvBindingList.cs (revision 3110) @@ -0,0 +1,404 @@ +// LumenWorks.Framework.IO.CSV.CachedCsvReader.CsvBindingList +// Copyright (c) 2006 S�bastien Lorion +// +// MIT license (http://en.wikipedia.org/wiki/MIT_License) +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do so, +// subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +// PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.ComponentModel; + +namespace LumenWorks.Framework.IO.Csv +{ + public partial class CachedCsvReader + : CsvReader + { + /// + /// Represents a binding list wrapper for a CSV reader. + /// + private class CsvBindingList + : IBindingList, ITypedList, IList, IList + { + #region Fields + + /// + /// Contains the linked CSV reader. + /// + private CachedCsvReader _csv; + + /// + /// Contains the cached record count. + /// + private int _count; + + /// + /// Contains the cached property descriptors. + /// + private PropertyDescriptorCollection _properties; + + /// + /// Contains the current sort property. + /// + private CsvPropertyDescriptor _sort; + + /// + /// Contains the current sort direction. + /// + private ListSortDirection _direction; + + #endregion + + #region Constructors + + /// + /// Initializes a new instance of the CsvBindingList class. + /// + /// + public CsvBindingList(CachedCsvReader csv) + { + _csv = csv; + _count = -1; + _direction = ListSortDirection.Ascending; + } + + #endregion + + #region IBindingList members + + public void AddIndex(PropertyDescriptor property) + { + } + + public bool AllowNew + { + get + { + return false; + } + } + + public void ApplySort(PropertyDescriptor property, System.ComponentModel.ListSortDirection direction) + { + _sort = (CsvPropertyDescriptor) property; + _direction = direction; + + _csv.ReadToEnd(); + + _csv._records.Sort(new CsvRecordComparer(_sort.Index, _direction)); + } + + public PropertyDescriptor SortProperty + { + get + { + return _sort; + } + } + + public int Find(PropertyDescriptor property, object key) + { + int fieldIndex = ((CsvPropertyDescriptor) property).Index; + string value = (string) key; + + int recordIndex = 0; + int count = this.Count; + + while (recordIndex < count && _csv[recordIndex, fieldIndex] != value) + recordIndex++; + + if (recordIndex == count) + return -1; + else + return recordIndex; + } + + public bool SupportsSorting + { + get + { + return true; + } + } + + public bool IsSorted + { + get + { + return _sort != null; + } + } + + public bool AllowRemove + { + get + { + return false; + } + } + + public bool SupportsSearching + { + get + { + return true; + } + } + + public System.ComponentModel.ListSortDirection SortDirection + { + get + { + return _direction; + } + } + + public event System.ComponentModel.ListChangedEventHandler ListChanged + { + add { } + remove { } + } + + public bool SupportsChangeNotification + { + get + { + return false; + } + } + + public void RemoveSort() + { + _sort = null; + _direction = ListSortDirection.Ascending; + } + + public object AddNew() + { + throw new NotSupportedException(); + } + + public bool AllowEdit + { + get + { + return false; + } + } + + public void RemoveIndex(PropertyDescriptor property) + { + } + + #endregion + + #region ITypedList Members + + public PropertyDescriptorCollection GetItemProperties(PropertyDescriptor[] listAccessors) + { + if (_properties == null) + { + PropertyDescriptor[] properties = new PropertyDescriptor[_csv.FieldCount]; + + for (int i = 0; i < properties.Length; i++) + properties[i] = new CsvPropertyDescriptor(((System.Data.IDataReader) _csv).GetName(i), i); + + _properties = new 
PropertyDescriptorCollection(properties); + } + + return _properties; + } + + public string GetListName(PropertyDescriptor[] listAccessors) + { + return string.Empty; + } + + #endregion + + #region IList Members + + public int IndexOf(string[] item) + { + throw new NotSupportedException(); + } + + public void Insert(int index, string[] item) + { + throw new NotSupportedException(); + } + + public void RemoveAt(int index) + { + throw new NotSupportedException(); + } + + public string[] this[int index] + { + get + { + _csv.MoveTo(index); + return _csv._records[index]; + } + set + { + throw new NotSupportedException(); + } + } + + #endregion + + #region ICollection Members + + public void Add(string[] item) + { + throw new NotSupportedException(); + } + + public void Clear() + { + throw new NotSupportedException(); + } + + public bool Contains(string[] item) + { + throw new NotSupportedException(); + } + + public void CopyTo(string[][] array, int arrayIndex) + { + _csv.MoveToStart(); + + while (_csv.ReadNextRecord()) + _csv.CopyCurrentRecordTo(array[arrayIndex++]); + } + + public int Count + { + get + { + if (_count < 0) + { + _csv.ReadToEnd(); + _count = (int) _csv.CurrentRecordIndex + 1; + } + + return _count; + } + } + + public bool IsReadOnly + { + get { return true; } + } + + public bool Remove(string[] item) + { + throw new NotSupportedException(); + } + + #endregion + + #region IEnumerable Members + + public IEnumerator GetEnumerator() + { + return _csv.GetEnumerator(); + } + + #endregion + + #region IList Members + + public int Add(object value) + { + throw new NotSupportedException(); + } + + public bool Contains(object value) + { + throw new NotSupportedException(); + } + + public int IndexOf(object value) + { + throw new NotSupportedException(); + } + + public void Insert(int index, object value) + { + throw new NotSupportedException(); + } + + public bool IsFixedSize + { + get { return true; } + } + + public void Remove(object value) + { + throw new NotSupportedException(); + } + + object IList.this[int index] + { + get + { + return this[index]; + } + set + { + throw new NotSupportedException(); + } + } + + #endregion + + #region ICollection Members + + public void CopyTo(Array array, int index) + { + _csv.MoveToStart(); + + while (_csv.ReadNextRecord()) + _csv.CopyCurrentRecordTo((string[]) array.GetValue(index++)); + } + + public bool IsSynchronized + { + get { return false; } + } + + public object SyncRoot + { + get { return null; } + } + + #endregion + + #region IEnumerable Members + + System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() + { + return this.GetEnumerator(); + } + + #endregion + } + } +} \ No newline at end of file Index: DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvImporterSurfaceLines.cs =================================================================== diff -u -r3102 -r3110 --- DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvImporterSurfaceLines.cs (.../CsvImporterSurfaceLines.cs) (revision 3102) +++ DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvImporterSurfaceLines.cs (.../CsvImporterSurfaceLines.cs) (revision 3110) @@ -18,15 +18,200 @@ // All names, logos, and references to "Deltares" are registered trademarks of // Stichting Deltares and remain full property of Stichting Deltares at all times. // All rights reserved. 
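An aside before the surface-line importer code that follows: the characteristic-points importer above repeats the GetHeaderIndexByString/CheckColumn pair for every required column. A hedged sketch of how that pair could be collapsed into a single helper; the helper name and wrapper class are hypothetical and not part of this revision, and the Dutch message mirrors the one CheckColumn builds.

using System;
using Deltares.LayerOnSlopeTool.Data.CsvImporters;

internal static class RequiredColumnSketch
{
    // Resolves a required column and throws the same kind of ArgumentException
    // as CheckColumn in the importers when the header field is missing.
    internal static int ResolveRequiredColumn(string[] headers, string fileName, string columnName)
    {
        int index = CsvReaderUtilities.GetHeaderIndexByString(headers, columnName);
        if (index < 0)
        {
            throw new ArgumentException(string.Format("{0} : In de header ontbreekt veld: {1}", fileName, columnName));
        }
        return index;
    }
}

A call site would then shrink to one line, for example int xCol = RequiredColumnSketch.ResolveRequiredColumn(headers, fileName, CharacteristicPointCsvIdentifiers.SurfaceLevelInsideX);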
+
 using System;
 using System.Collections.Generic;
+using System.IO;
 using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
+using System.Threading;
+using LumenWorks.Framework.IO.Csv;
 
 namespace Deltares.LayerOnSlopeTool.Data.CsvImporters
 {
+    /// <summary>
+    /// Holds the importer for the surface lines from csv files.
+    /// </summary>
     public class CsvImporterSurfaceLines
     {
+        private readonly List<SurfaceLineRecord> surfaceLineRecords = new List<SurfaceLineRecord>();
+        private List<string> errorMessages = new List<string>();
+        private const string LocationColumnName = "LocationId";
+        private const string ProfileNameColumnName = "Profielnaam";
+        private const string FirstXColumnName = "X1";
+
+        /// <summary>
+        /// Record used for importing the items from the surface line csv file.
+        /// </summary>
+        public class SurfaceLineRecord
+        {
+            private IList<double> xcoors = new List<double>();
+            private IList<double> ycoors = new List<double>();
+            private IList<double> zcoors = new List<double>();
+
+            public string SurfaceLineId { get; set; }
+            public int SurfaceLineRecordId { get; set; }
+
+            public IList<double> Xcoors
+            {
+                get { return xcoors; }
+                set { xcoors = value; }
+            }
+
+            public IList<double> Ycoors
+            {
+                get { return ycoors; }
+                set { ycoors = value; }
+            }
+
+            public IList<double> Zcoors
+            {
+                get { return zcoors; }
+                set { zcoors = value; }
+            }
+        }
+
+        private void CheckColumn(int index, string fileName, string fieldName)
+        {
+            if (index < 0)
+            {
+                var csvHeaderFieldError = "In de header ontbreekt veld:";
+                throw new ArgumentException(string.Format("{0} : {1} {2}", fileName, csvHeaderFieldError, fieldName));
+            }
+        }
+
+        /// <summary>
+        /// Copies the file to a temporary file.
+        /// </summary>
+        /// <param name="fileName">Name of the file.</param>
+        /// <returns>The name of the temporary file.</returns>
+        private string CopyToTemporaryFile(string fileName)
+        {
+            string newFilename = Path.GetTempFileName();
+            File.Delete(newFilename);
+            File.Copy(fileName, newFilename);
+            return newFilename;
+        }
+
+        public CsvImporterSurfaceLines(string filename)
+        {
+            errorMessages.Clear();
+            if (filename == "")
+            {
+                throw new ArgumentException("De filenaam voor de surfaceline csv is leeg.");
+            }
+
+            if (!File.Exists(filename))
+            {
+                throw new ArgumentException(string.Format("De surfaceline csv file met naam {0} bestaat niet.", filename));
+            }
+
+            // This is a 'dynamic' csv file, i.e. the number of columns is not known and can vary per row,
+            // so ensure a proper header (i.e. a header with the maximum number of fields).
+            // Because the file will be changed, copy it to a temporary file and perform the action on that copy.
+            string tempFilename = CopyToTemporaryFile(filename);
+            CsvReaderUtilities.EnsureProperHeaderForDynamicRecordLengthInCsvFile(tempFilename);
+            var cultureInfo = Thread.CurrentThread.CurrentCulture;
+            try
+            {
+                Thread.CurrentThread.CurrentCulture = CsvReaderUtilities.DetermineCultureForFile(tempFilename);
+                using (CsvReader csv = new CsvReader(new StreamReader(tempFilename), true, ';') { MissingFieldAction = MissingFieldAction.ReplaceByNull })
+                {
+                    string[] headers = CsvImporterHelper.GetFieldHeaders(this, csv);
+                    // The surface line id column can be defined with 2 identifiers:
+                    // LocationColumnName is the new one, ProfileNameColumnName is deprecated.
+                    int colIndexSurfaceLineId = CsvReaderUtilities.GetHeaderIndexByString(headers, LocationColumnName);
+                    if (colIndexSurfaceLineId < 0)
+                    {
+                        colIndexSurfaceLineId = CsvReaderUtilities.GetHeaderIndexByString(headers, ProfileNameColumnName);
+                    }
+                    CheckColumn(colIndexSurfaceLineId, tempFilename, LocationColumnName);
+                    int colIndexFirstX = CsvReaderUtilities.GetHeaderIndexByString(headers, FirstXColumnName);
+                    CheckColumn(colIndexFirstX, tempFilename, FirstXColumnName);
+                    int maxColumns = headers.Count();
+                    var end = Math.Floor((maxColumns - colIndexFirstX + 1) / 3.0);
+                    var surflineIndex = 1;
+                    while (csv.ReadNextRecord())
+                    {
+                        var recordReadError = false;
+                        SurfaceLineRecord surfaceLine = new SurfaceLineRecord
+                        {
+                            SurfaceLineRecordId = surflineIndex
+                        };
+                        surflineIndex++;
+                        surfaceLine.SurfaceLineId = csv[colIndexSurfaceLineId];
+                        var colIndex = colIndexFirstX;
+                        for (int i = 0; i < end; i++)
+                        {
+                            // If the first value of the triplet is null or empty, the end of this surface line
+                            // is reached, so break out of the loop. Checking for null can only be done this way:
+                            // comparing csv[index] == null does not work, because the indexer itself throws an exception.
+                            try
+                            {
+                                var dum = csv[colIndex];
+                                if (String.IsNullOrEmpty(dum))
+                                    break;
+                            }
+                            catch (Exception)
+                            {
+                                break;
+                            }
+                            // Other "errors" are real errors, so trap them.
+                            try
+                            {
+                                surfaceLine.Xcoors.Add(Convert.ToDouble(csv[colIndex]));
+                                colIndex++;
+                                surfaceLine.Ycoors.Add(Convert.ToDouble(csv[colIndex]));
+                                colIndex++;
+                                surfaceLine.Zcoors.Add(Convert.ToDouble(csv[colIndex]));
+                                colIndex++;
+                            }
+                            catch (Exception e)
+                            {
+                                var csvSurfaceLineError = String.Format("Tijdens het lezen van surface line {0} in kolom {1} uit de csv trad de volgende fout op: ",
+                                                                        surfaceLine.SurfaceLineRecordId, colIndex + 1);
+                                errorMessages.Add(csvSurfaceLineError + e.Message);
+                                recordReadError = true;
+                                break;
+                            }
+                        }
+                        if (!recordReadError)
+                        {
+                            surfaceLineRecords.Add(surfaceLine);
+                        }
+                    }
+                }
+            }
+            finally
+            {
+                File.Delete(tempFilename);
+                Thread.CurrentThread.CurrentCulture = cultureInfo;
+            }
+        }
+
+        /// <summary>
+        /// Gets the imported items.
+        /// </summary>
+        /// <value>
+        /// The imported items.
+        /// </value>
+        public List<SurfaceLineRecord> ImportedItems
+        {
+            get { return surfaceLineRecords; }
+        }
+
+        /// <summary>
+        /// Gets the error messages.
+        /// </summary>
+        /// <value>
+        /// The error messages.
+ /// + public List ErrorMessages + { + get { return errorMessages; } + set { errorMessages = value; } + } + } } Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/Events/ParseErrorEventArgs.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/Events/ParseErrorEventArgs.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/Events/ParseErrorEventArgs.cs (revision 3110) @@ -0,0 +1,85 @@ +// LumenWorks.Framework.IO.CSV.ParseErrorEventArgs +// Copyright (c) 2006 S�bastien Lorion +// +// MIT license (http://en.wikipedia.org/wiki/MIT_License) +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do so, +// subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +using System; + +namespace LumenWorks.Framework.IO.Csv +{ + /// + /// Provides data for the event. + /// + public class ParseErrorEventArgs + : EventArgs + { + #region Fields + + /// + /// Contains the error that occured. + /// + private MalformedCsvException _error; + + /// + /// Contains the action to take. + /// + private ParseErrorAction _action; + + #endregion + + #region Constructors + + /// + /// Initializes a new instance of the ParseErrorEventArgs class. + /// + /// The error that occured. + /// The default action to take. + public ParseErrorEventArgs(MalformedCsvException error, ParseErrorAction defaultAction) + : base() + { + _error = error; + _action = defaultAction; + } + + #endregion + + #region Properties + + /// + /// Gets the error that occured. + /// + /// The error that occured. + public MalformedCsvException Error + { + get { return _error; } + } + + /// + /// Gets or sets the action to take. + /// + /// The action to take. 
+ public ParseErrorAction Action + { + get { return _action; } + set { _action = value; } + } + + #endregion + } +} \ No newline at end of file Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CsvReader.DataReaderValidations.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CsvReader.DataReaderValidations.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/CsvReader.DataReaderValidations.cs (revision 3110) @@ -0,0 +1,50 @@ +// LumenWorks.Framework.IO.CSV.CsvReader.DataReaderValidations +// Copyright (c) 2005 S�bastien Lorion +// +// MIT license (http://en.wikipedia.org/wiki/MIT_License) +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do so, +// subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +using System; + +namespace LumenWorks.Framework.IO.Csv +{ + public partial class CsvReader + { + /// + /// Defines the data reader validations. + /// + [Flags] + private enum DataReaderValidations + { + /// + /// No validation. + /// + None = 0, + + /// + /// Validate that the data reader is initialized. + /// + IsInitialized = 1, + + /// + /// Validate that the data reader is not closed. + /// + IsNotClosed = 2 + } + } +} \ No newline at end of file Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/Exceptions/MissingFieldCsvException.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/Exceptions/MissingFieldCsvException.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/Exceptions/MissingFieldCsvException.cs (revision 3110) @@ -0,0 +1,107 @@ +// LumenWorks.Framework.IO.Csv.MissingFieldCsvException +// Copyright (c) 2005 S�bastien Lorion +// +// MIT license (http://en.wikipedia.org/wiki/MIT_License) +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do so, +// subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +using System; +using System.Globalization; +using System.Runtime.Serialization; +using System.Security.Permissions; + +using LumenWorks.Framework.IO.Csv.Resources; + +namespace LumenWorks.Framework.IO.Csv +{ + /// + /// Represents the exception that is thrown when a there is a missing field in a record of the CSV file. + /// + /// + /// MissingFieldException would have been a better name, but there is already a . + /// + [Serializable()] + public class MissingFieldCsvException + : MalformedCsvException + { + #region Constructors + + /// + /// Initializes a new instance of the MissingFieldCsvException class. + /// + public MissingFieldCsvException() + : base() + { + } + + /// + /// Initializes a new instance of the MissingFieldCsvException class. + /// + /// The message that describes the error. + public MissingFieldCsvException(string message) + : base(message) + { + } + + /// + /// Initializes a new instance of the MissingFieldCsvException class. + /// + /// The message that describes the error. + /// The exception that is the cause of the current exception. + public MissingFieldCsvException(string message, Exception innerException) + : base(message, innerException) + { + } + + /// + /// Initializes a new instance of the MissingFieldCsvException class. + /// + /// The raw data when the error occured. + /// The current position in the raw data. + /// The current record index. + /// The current field index. + public MissingFieldCsvException(string rawData, int currentPosition, long currentRecordIndex, int currentFieldIndex) + : base(rawData, currentPosition, currentRecordIndex, currentFieldIndex) + { + } + + /// + /// Initializes a new instance of the MissingFieldCsvException class. + /// + /// The raw data when the error occured. + /// The current position in the raw data. + /// The current record index. + /// The current field index. + /// The exception that is the cause of the current exception. + public MissingFieldCsvException(string rawData, int currentPosition, long currentRecordIndex, int currentFieldIndex, Exception innerException) + : base(rawData, currentPosition, currentRecordIndex, currentFieldIndex, innerException) + { + } + + /// + /// Initializes a new instance of the MissingFieldCsvException class with serialized data. + /// + /// The that holds the serialized object data about the exception being thrown. + /// The that contains contextual information about the source or destination. 
+ protected MissingFieldCsvException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + + #endregion + } +} \ No newline at end of file Index: DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvColumnNames.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvColumnNames.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Deltares.LayerOnSlopeTool.Data/CsvImporters/CsvColumnNames.cs (revision 3110) @@ -0,0 +1,63 @@ +// Copyright (C) Stichting Deltares 2020. All rights reserved. +// +// This file is part of the LayerOnSlopeTool +// +// The LayerOnSlopeTool is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . +// +// All names, logos, and references to "Deltares" are registered trademarks of +// Stichting Deltares and remain full property of Stichting Deltares at all times. +// All rights reserved. + +namespace Deltares.LayerOnSlopeTool.Data.CsvImporters +{ + + public class CsvColumnNames + { + /// + /// location_id + /// + public const string LocationColumnName = "location_id"; + + /// + /// surfaceline_id + /// + public const string SurfaceLineColumnName = "surfaceline_id"; + + /// + /// x_soilgeometry2D_origin + /// + public const string XSoilGeometry2DOriginColumnName = "x_soilgeometry2D_origin"; + + /// + /// ophoogmateriaaldijk + /// + public const string DikeEmbankmentMaterialColumnName = "ophoogmateriaaldijk"; + + /// + /// soilgeometry2D_name + /// + public const string SoilGeometryNameColumnName = "soilgeometry2D_name"; + + /// + /// slope_layer_thickness + /// + public const string LayerThicknessColumnName = "slope_layer_thickness"; + + /// + /// slope_layer_material + /// + public const string LayerMaterialColumnName = "slope_layer_material"; + + } +} Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/Exceptions/MalformedCsvException.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/Exceptions/MalformedCsvException.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/Exceptions/MalformedCsvException.cs (revision 3110) @@ -0,0 +1,221 @@ +// LumenWorks.Framework.IO.Csv.MalformedCsvException +// Copyright (c) 2005 S�bastien Lorion +// +// MIT license (http://en.wikipedia.org/wiki/MIT_License) +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do so, +// subject to the following conditions: +// +// The above copyright 
notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +using System; +using System.Globalization; +using System.Runtime.Serialization; +using System.Security.Permissions; + +using LumenWorks.Framework.IO.Csv.Resources; + +namespace LumenWorks.Framework.IO.Csv +{ + /// + /// Represents the exception that is thrown when a CSV file is malformed. + /// + [Serializable()] + public class MalformedCsvException + : Exception + { + #region Fields + + /// + /// Contains the message that describes the error. + /// + private string _message; + + /// + /// Contains the raw data when the error occured. + /// + private string _rawData; + + /// + /// Contains the current field index. + /// + private int _currentFieldIndex; + + /// + /// Contains the current record index. + /// + private long _currentRecordIndex; + + /// + /// Contains the current position in the raw data. + /// + private int _currentPosition; + + #endregion + + #region Constructors + + /// + /// Initializes a new instance of the MalformedCsvException class. + /// + public MalformedCsvException() + : this(null, null) + { + } + + /// + /// Initializes a new instance of the MalformedCsvException class. + /// + /// The message that describes the error. + public MalformedCsvException(string message) + : this(message, null) + { + } + + /// + /// Initializes a new instance of the MalformedCsvException class. + /// + /// The message that describes the error. + /// The exception that is the cause of the current exception. + public MalformedCsvException(string message, Exception innerException) + : base(String.Empty, innerException) + { + _message = (message == null ? string.Empty : message); + + _rawData = string.Empty; + _currentPosition = -1; + _currentRecordIndex = -1; + _currentFieldIndex = -1; + } + + /// + /// Initializes a new instance of the MalformedCsvException class. + /// + /// The raw data when the error occured. + /// The current position in the raw data. + /// The current record index. + /// The current field index. + public MalformedCsvException(string rawData, int currentPosition, long currentRecordIndex, int currentFieldIndex) + : this(rawData, currentPosition, currentRecordIndex, currentFieldIndex, null) + { + } + + /// + /// Initializes a new instance of the MalformedCsvException class. + /// + /// The raw data when the error occured. + /// The current position in the raw data. + /// The current record index. + /// The current field index. + /// The exception that is the cause of the current exception. + public MalformedCsvException(string rawData, int currentPosition, long currentRecordIndex, int currentFieldIndex, Exception innerException) + : base(String.Empty, innerException) + { + _rawData = (rawData == null ? 
string.Empty : rawData); + _currentPosition = currentPosition; + _currentRecordIndex = currentRecordIndex; + _currentFieldIndex = currentFieldIndex; + + _message = String.Format(CultureInfo.InvariantCulture, ExceptionMessage.MalformedCsvException, _currentRecordIndex, _currentFieldIndex, _currentPosition, _rawData); + } + + /// + /// Initializes a new instance of the MalformedCsvException class with serialized data. + /// + /// The that holds the serialized object data about the exception being thrown. + /// The that contains contextual information about the source or destination. + protected MalformedCsvException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + _message = info.GetString("MyMessage"); + + _rawData = info.GetString("RawData"); + _currentPosition = info.GetInt32("CurrentPosition"); + _currentRecordIndex = info.GetInt64("CurrentRecordIndex"); + _currentFieldIndex = info.GetInt32("CurrentFieldIndex"); + } + + #endregion + + #region Properties + + /// + /// Gets the raw data when the error occured. + /// + /// The raw data when the error occured. + public string RawData + { + get { return _rawData; } + } + + /// + /// Gets the current position in the raw data. + /// + /// The current position in the raw data. + public int CurrentPosition + { + get { return _currentPosition; } + } + + /// + /// Gets the current record index. + /// + /// The current record index. + public long CurrentRecordIndex + { + get { return _currentRecordIndex; } + } + + /// + /// Gets the current field index. + /// + /// The current record index. + public int CurrentFieldIndex + { + get { return _currentFieldIndex; } + } + + #endregion + + #region Overrides + + /// + /// Gets a message that describes the current exception. + /// + /// A message that describes the current exception. + public override string Message + { + get { return _message; } + } + + /// + /// When overridden in a derived class, sets the with information about the exception. + /// + /// The that holds the serialized object data about the exception being thrown. + /// The that contains contextual information about the source or destination. 
+ public override void GetObjectData(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) + { + base.GetObjectData(info, context); + + info.AddValue("MyMessage", _message); + + info.AddValue("RawData", _rawData); + info.AddValue("CurrentPosition", _currentPosition); + info.AddValue("CurrentRecordIndex", _currentRecordIndex); + info.AddValue("CurrentFieldIndex", _currentFieldIndex); + } + + #endregion + } +} \ No newline at end of file Index: DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/ParseErrorAction.cs =================================================================== diff -u --- DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/ParseErrorAction.cs (revision 0) +++ DamTools/LayerOnSlopeTool/trunk/src/Tools/LumenWorks.Framework.IO/Csv/ParseErrorAction.cs (revision 3110) @@ -0,0 +1,44 @@ +// LumenWorks.Framework.IO.CSV.ParseErrorAction +// Copyright (c) 2006 S�bastien Lorion +// +// MIT license (http://en.wikipedia.org/wiki/MIT_License) +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do so, +// subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +namespace LumenWorks.Framework.IO.Csv +{ + /// + /// Specifies the action to take when a parsing error has occured. + /// + public enum ParseErrorAction + { + /// + /// Raises the event. + /// + RaiseEvent = 0, + + /// + /// Tries to advance to next line. + /// + AdvanceToNextLine = 1, + + /// + /// Throws an exception. + /// + ThrowException = 2, + } +}
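Taken together, ParseErrorAction, ParseErrorEventArgs, MalformedCsvException and MissingFieldCsvException form the reader's error-handling surface. A hypothetical sketch of how a caller can wire them up follows (illustrative only, not part of this changeset), assuming the upstream LumenWorks CsvReader members ParseError, DefaultParseErrorAction and MissingFieldAction:

    // Illustrative sketch; the file path and handling policy are assumptions.
    using System;
    using System.IO;
    using LumenWorks.Framework.IO.Csv;

    public static class CsvErrorHandlingExample
    {
        public static void ReadTolerantly(string path)
        {
            using (var csv = new CsvReader(new StreamReader(path), true, ';'))
            {
                // Missing trailing fields become null instead of raising MissingFieldCsvException.
                csv.MissingFieldAction = MissingFieldAction.ReplaceByNull;

                // Malformed records raise the ParseError event (ParseErrorAction.RaiseEvent);
                // the handler inspects ParseErrorEventArgs and decides what happens next.
                csv.DefaultParseErrorAction = ParseErrorAction.RaiseEvent;
                csv.ParseError += (sender, e) =>
                {
                    Console.WriteLine("Record {0}, field {1}: {2}",
                        e.Error.CurrentRecordIndex, e.Error.CurrentFieldIndex, e.Error.Message);
                    e.Action = ParseErrorAction.AdvanceToNextLine; // skip the bad record
                };

                try
                {
                    while (csv.ReadNextRecord())
                    {
                        // process csv[i] ...
                    }
                }
                catch (MalformedCsvException ex)
                {
                    // Reached only when a handler chose ParseErrorAction.ThrowException.
                    Console.WriteLine("CSV malformed near position {0}: {1}", ex.CurrentPosition, ex.RawData);
                }
            }
        }
    }

CsvImporterSurfaceLines above takes the simpler route: it sets MissingFieldAction.ReplaceByNull, wraps the Convert.ToDouble calls in try/catch, and collects any failures in ErrorMessages instead of subscribing to ParseError.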