diff --git a/ETLBox.sln b/ETLBox.sln new file mode 100644 index 00000000..a4734496 --- /dev/null +++ b/ETLBox.sln @@ -0,0 +1,31 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 15 +VisualStudioVersion = 15.0.27428.2005 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ETLBox", "ETLBox\ETLBox.csproj", "{0C7548BA-A597-488A-8ADD-2F628BE691D6}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ETLBoxTest", "ETLBoxTest\ETLBoxTest.csproj", "{E18C5029-9FA4-4846-828C-1B42D5D18C3C}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {0C7548BA-A597-488A-8ADD-2F628BE691D6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0C7548BA-A597-488A-8ADD-2F628BE691D6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0C7548BA-A597-488A-8ADD-2F628BE691D6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0C7548BA-A597-488A-8ADD-2F628BE691D6}.Release|Any CPU.Build.0 = Release|Any CPU + {E18C5029-9FA4-4846-828C-1B42D5D18C3C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E18C5029-9FA4-4846-828C-1B42D5D18C3C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E18C5029-9FA4-4846-828C-1B42D5D18C3C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E18C5029-9FA4-4846-828C-1B42D5D18C3C}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {A83E1CF2-2A7A-4315-8474-C060C51E118C} + EndGlobalSection +EndGlobal diff --git a/ETLBox/Definitions/ConnectionManager/DbConnectionManager.cs b/ETLBox/Definitions/ConnectionManager/DbConnectionManager.cs new file mode 100644 index 00000000..60e76570 --- /dev/null +++ b/ETLBox/Definitions/ConnectionManager/DbConnectionManager.cs @@ -0,0 +1,93 @@ +using System; +using System.Data; +using System.Threading.Tasks; + +namespace ALE.ETLBox { + public abstract class DbConnectionManager : IDisposable, IDbConnectionManager + where Connection : class, IDbConnection, new() + where Command : class, IDbCommand, new() { + public int MaxLoginAttempts { get; set; } = 20; + + public ConnectionString ConnectionString { get; set; } + + internal Connection DbConnection { get; set; } + + internal bool IsConnectionOpen => DbConnection?.State == ConnectionState.Open; + + public DbConnectionManager() { } + + public DbConnectionManager(ConnectionString connectionString) : this() { + this.ConnectionString = connectionString; + } + + public void Open() { + DbConnection = new Connection(); + if (!IsConnectionOpen) { + DbConnection.ConnectionString = ConnectionString.Value; + bool successfullyConnected = false; + Exception lastException = null; + for (int i = 1; i <= MaxLoginAttempts; i++) { + try { + DbConnection.Open(); + successfullyConnected = true; + } catch (Exception e) { + successfullyConnected = false; + lastException = e; + Task.Delay(500 * i).Wait(); + } + if (successfullyConnected) break; + } + if (!successfullyConnected) + throw lastException ?? 
new Exception("Could not connect to database!"); + } + } + + //public void CloseConnection() => Close(); + + public Command CreateCommand(string commandText) { + var cmd = DbConnection.CreateCommand(); + cmd.CommandTimeout = 0; + cmd.CommandText = commandText; + return cmd as Command; + } + + public int ExecuteNonQuery(string commandText) { + Command sqlcmd = CreateCommand(commandText); + return sqlcmd.ExecuteNonQuery(); + } + + public object ExecuteScalar(string commandText) { + Command cmd = CreateCommand(commandText); + return cmd.ExecuteScalar(); + } + + public IDataReader ExecuteReader(string commandText) { + Command cmd = CreateCommand(commandText); + return cmd.ExecuteReader(); + + } + + public abstract void BulkInsert(IDataReader data, IColumnMappingCollection columnMapping, string tableName); + + #region IDisposable Support + private bool disposedValue = false; // To detect redundant calls + + protected void Dispose(bool disposing) { + if (!disposedValue) { + if (disposing) { + if (DbConnection != null) + DbConnection.Close(); + DbConnection = null; + } + disposedValue = true; + } + } + + public void Dispose() => Dispose(true); + public void Close() => Dispose(); + + public abstract IDbConnectionManager Clone(); + #endregion + + } +} diff --git a/ETLBox/Definitions/ConnectionManager/IConnectionManager.cs b/ETLBox/Definitions/ConnectionManager/IConnectionManager.cs new file mode 100644 index 00000000..4d527559 --- /dev/null +++ b/ETLBox/Definitions/ConnectionManager/IConnectionManager.cs @@ -0,0 +1,10 @@ +using System; + +namespace ALE.ETLBox { + public interface IConnectionManager : IDisposable { + ConnectionString ConnectionString { get; } + void Open(); + void Close(); + + } +} diff --git a/ETLBox/Definitions/ConnectionManager/ICubeConnectionManager.cs b/ETLBox/Definitions/ConnectionManager/ICubeConnectionManager.cs new file mode 100644 index 00000000..bcb6c54d --- /dev/null +++ b/ETLBox/Definitions/ConnectionManager/ICubeConnectionManager.cs @@ -0,0 +1,7 @@ +namespace ALE.ETLBox { + public interface ICubeConnectionManager : IConnectionManager { + void Process(); + void DropIfExists(); + ICubeConnectionManager Clone(); + } +} diff --git a/ETLBox/Definitions/ConnectionManager/IDbConnectionManager.cs b/ETLBox/Definitions/ConnectionManager/IDbConnectionManager.cs new file mode 100644 index 00000000..86afaf7b --- /dev/null +++ b/ETLBox/Definitions/ConnectionManager/IDbConnectionManager.cs @@ -0,0 +1,11 @@ +using System.Data; + +namespace ALE.ETLBox { + public interface IDbConnectionManager : IConnectionManager { + int ExecuteNonQuery(string command); + object ExecuteScalar(string command); + IDataReader ExecuteReader(string command); + void BulkInsert(IDataReader data, IColumnMappingCollection columnMapping, string tableName); + IDbConnectionManager Clone(); + } +} diff --git a/ETLBox/Definitions/DataFlow/IDataFlowDestination.cs b/ETLBox/Definitions/DataFlow/IDataFlowDestination.cs new file mode 100644 index 00000000..e02ecf0e --- /dev/null +++ b/ETLBox/Definitions/DataFlow/IDataFlowDestination.cs @@ -0,0 +1,5 @@ +namespace ALE.ETLBox { + public interface IDataFlowDestination : IDataFlowLinkTarget { + void Wait(); + } +} diff --git a/ETLBox/Definitions/DataFlow/IDataFlowLinkSource.cs b/ETLBox/Definitions/DataFlow/IDataFlowLinkSource.cs new file mode 100644 index 00000000..c6373db7 --- /dev/null +++ b/ETLBox/Definitions/DataFlow/IDataFlowLinkSource.cs @@ -0,0 +1,7 @@ +using System.Threading.Tasks.Dataflow; + +namespace ALE.ETLBox { + public interface IDataFlowLinkSource { + 
ISourceBlock SourceBlock { get; } + } +} diff --git a/ETLBox/Definitions/DataFlow/IDataFlowLinkTarget.cs b/ETLBox/Definitions/DataFlow/IDataFlowLinkTarget.cs new file mode 100644 index 00000000..8bfdde2e --- /dev/null +++ b/ETLBox/Definitions/DataFlow/IDataFlowLinkTarget.cs @@ -0,0 +1,7 @@ +using System.Threading.Tasks.Dataflow; + +namespace ALE.ETLBox { + public interface IDataFlowLinkTarget { + ITargetBlock TargetBlock { get; } + } +} diff --git a/ETLBox/Definitions/DataFlow/IDataFlowSource.cs b/ETLBox/Definitions/DataFlow/IDataFlowSource.cs new file mode 100644 index 00000000..dcc8d535 --- /dev/null +++ b/ETLBox/Definitions/DataFlow/IDataFlowSource.cs @@ -0,0 +1,6 @@ +namespace ALE.ETLBox { + public interface IDataFlowSource : IDataFlowLinkSource { + void ExecuteAsync(); + void LinkTo(IDataFlowLinkTarget target); + } +} diff --git a/ETLBox/Definitions/DataFlow/IDataFlowTransformation.cs b/ETLBox/Definitions/DataFlow/IDataFlowTransformation.cs new file mode 100644 index 00000000..093d34ca --- /dev/null +++ b/ETLBox/Definitions/DataFlow/IDataFlowTransformation.cs @@ -0,0 +1,4 @@ +namespace ALE.ETLBox { + public interface IDataFlowTransformation : IDataFlowLinkSource, IDataFlowLinkTarget { + } +} diff --git a/ETLBox/Definitions/DataFlow/TypeInfo.cs b/ETLBox/Definitions/DataFlow/TypeInfo.cs new file mode 100644 index 00000000..adaa3777 --- /dev/null +++ b/ETLBox/Definitions/DataFlow/TypeInfo.cs @@ -0,0 +1,25 @@ +using System; +using System.Reflection; + +namespace ALE.ETLBox { + public class TypeInfo { + public PropertyInfo[] PropertyInfos { get; set; } + public int PropertyLength { get; set; } + public bool IsArray { get; set; } = true; + + public TypeInfo() { + + } + + public TypeInfo(Type typ) { + GatherTypeInfos(typ); + } + private void GatherTypeInfos(Type typ) { + IsArray = typ.IsArray; + if (!typ.IsArray) { + PropertyInfos = typ.GetProperties(); + PropertyLength = PropertyInfos.Length; + } + } + } +} diff --git a/ETLBox/Definitions/Database/ITableColumn.cs b/ETLBox/Definitions/Database/ITableColumn.cs new file mode 100644 index 00000000..52e772e6 --- /dev/null +++ b/ETLBox/Definitions/Database/ITableColumn.cs @@ -0,0 +1,16 @@ +namespace ALE.ETLBox { + public interface ITableColumn { + string Name { get; } + string DataType { get; } + bool AllowNulls { get; } + bool IsIdentity { get; } + int? IdentitySeed { get; } + int? 
IdentityIncrement { get; } + bool IsPrimaryKey { get; } + string DefaultValue { get; } + string DefaultConstraintName { get; } + string Collation { get; } + string ComputedColumn { get; } + + } +} diff --git a/ETLBox/Definitions/Database/ProcedureDefinition.cs b/ETLBox/Definitions/Database/ProcedureDefinition.cs new file mode 100644 index 00000000..bb0878cd --- /dev/null +++ b/ETLBox/Definitions/Database/ProcedureDefinition.cs @@ -0,0 +1,25 @@ +using System.Collections.Generic; + +namespace ALE.ETLBox { + public class ProcedureDefinition { + public string Name { get; set; } + public string Definition { get; set; } + + public List Parameter { get; set; } + + public ProcedureDefinition() { + Parameter = new List(); + } + + public ProcedureDefinition(string name, string definition) : this() { + Name = name; + Definition = definition; + } + + public ProcedureDefinition(string name, string definition, List parameter) : this(name, definition) { + Parameter = parameter; + } + + + } +} diff --git a/ETLBox/Definitions/Database/ProcedureParameter.cs b/ETLBox/Definitions/Database/ProcedureParameter.cs new file mode 100644 index 00000000..fa271a82 --- /dev/null +++ b/ETLBox/Definitions/Database/ProcedureParameter.cs @@ -0,0 +1,37 @@ +using System; + +namespace ALE.ETLBox { + public class ProcedureParameter { + public string Name { get; set; } + public string DataType { get; set; } + public string DefaultValue { get; set; } + public bool HasDefaultValue => !String.IsNullOrWhiteSpace(DefaultValue); + public bool ReadOnly { get; set; } + public bool Out { get; set; } + public string Sql { + get { + string sql = $@"@{Name} {DataType}"; + if (HasDefaultValue) + sql += $" = {DefaultValue}"; + if (Out) + sql += " OUT"; + if (ReadOnly) + sql += " READONLY"; + return sql; + } + } + + public ProcedureParameter() { + } + + public ProcedureParameter(string name, string dataType) : this() { + Name = name; + DataType = dataType; + } + + public ProcedureParameter(string name, string dataType, string defaultValue) : this(name, dataType) { + DefaultValue = defaultValue; + } + + } +} diff --git a/ETLBox/Definitions/Database/TableColumn.cs b/ETLBox/Definitions/Database/TableColumn.cs new file mode 100644 index 00000000..776662e8 --- /dev/null +++ b/ETLBox/Definitions/Database/TableColumn.cs @@ -0,0 +1,54 @@ +using System; +using System.Data; + +namespace ALE.ETLBox { + public class TableColumn : ITableColumn, IColumnMapping { + private string _dataSetColumn; + private string _sourceColumn; + + public string Name { get; set; } + public string DataType { get; set; } + public bool AllowNulls { get; set; } + public bool IsIdentity { get; set; } + public int? IdentitySeed { get; set; } + public int? IdentityIncrement { get; set; } + public bool IsPrimaryKey { get; set; } + public string DefaultValue { get; set; } + public string DefaultConstraintName { get; set; } + public string Collation { get; set; } + public string ComputedColumn { get; set; } + public System.Type NETDataType => Type.GetType(DataTypeConverter.GetObjectTypeString(DataType)); + + + public string DataSetColumn { + get { return String.IsNullOrWhiteSpace(_dataSetColumn) ? Name : _dataSetColumn; } + set { + _dataSetColumn = value; + } + } + public string SourceColumn { + get { return String.IsNullOrWhiteSpace(_sourceColumn) ? 
Name : _sourceColumn; } + set { + _sourceColumn = value; + } + } + + public TableColumn() { } + public TableColumn(string name, string dataType) : this() { + Name = name; + DataType = dataType; + } + + public TableColumn(string name, string dataType, bool allowNulls) : this(name, dataType) { + AllowNulls = allowNulls; + } + + public TableColumn(string name, string dataType, bool allowNulls, bool isPrimaryKey) : this(name, dataType, allowNulls) { + IsPrimaryKey = isPrimaryKey; + } + + public TableColumn(string name, string dataType, bool allowNulls, bool isPrimaryKey, bool isIdentity) : this(name, dataType, allowNulls, isPrimaryKey) { + IsIdentity = isIdentity; + } + } +} diff --git a/ETLBox/Definitions/Database/TableData.cs b/ETLBox/Definitions/Database/TableData.cs new file mode 100644 index 00000000..7ef3bfd7 --- /dev/null +++ b/ETLBox/Definitions/Database/TableData.cs @@ -0,0 +1,138 @@ +using System; +using System.Collections.Generic; +using System.Data; +using System.Data.Common; + +namespace ALE.ETLBox { + public class TableData : TableData { + public TableData() : base() { } + public TableData(TableDefinition definition) : base() { } + public TableData(TableDefinition definition, int estimatedBatchSize) : base() { } + } + + public class TableData : IDisposable, IDataReader { + public int? EstimatedBatchSize { get; set; } + public DataColumnMappingCollection ColumnMapping { + get { + var mapping = new DataColumnMappingCollection(); + foreach (var col in Definition.Columns) + mapping.Add(new DataColumnMapping(col.SourceColumn, col.DataSetColumn)); + return mapping; + } + } + public bool HasIdentityColumn => IDColumnIndex != null; + public List Rows { get; set; } + + public T[] CurrentRow { get; set; } + int ReadIndex { get; set; } + TableDefinition Definition { get; set; } + int? 
IDColumnIndex { get; set; } + + public TableData() { + Rows = new List(); + } + public TableData(TableDefinition definition) : this() { + Definition = definition; + } + + public TableData(TableDefinition definition, int estimatedBatchSize) { + Definition = definition; + EstimatedBatchSize = estimatedBatchSize; + Rows = new List(estimatedBatchSize); + } + + + public string[] NewRow() => new string[Definition.Columns.Count]; + public object this[string name] => Rows[GetOrdinal(name)]; + public object this[int i] => Rows[i]; + public int Depth => 0; + public int FieldCount => Rows.Count; + public bool IsClosed => Rows.Count == 0; + public int RecordsAffected => Rows.Count; + public bool GetBoolean(int i) => Convert.ToBoolean(CurrentRow[ShiftIndexAroundIDColumn(i)]); + public byte GetByte(int i) => Convert.ToByte(CurrentRow[ShiftIndexAroundIDColumn(i)]); + public long GetBytes(int i, long fieldOffset, byte[] buffer, int bufferoffset, int length) => 0; + public char GetChar(int i) => Convert.ToChar(CurrentRow[ShiftIndexAroundIDColumn(i)]); + public long GetChars(int i, long fieldoffset, char[] buffer, int bufferoffset, int length) { + string value = Convert.ToString(CurrentRow[ShiftIndexAroundIDColumn(i)]); + buffer = value.Substring(bufferoffset, length).ToCharArray(); + return buffer.Length; + + } + public DateTime GetDateTime(int i) => Convert.ToDateTime(CurrentRow[ShiftIndexAroundIDColumn(i)]); + public IDataReader GetData(int i) => null; + public string GetDataTypeName(int i) => Definition.Columns[ShiftIndexAroundIDColumn(i)].NETDataType.Name; + public decimal GetDecimal(int i) => Convert.ToDecimal(CurrentRow[ShiftIndexAroundIDColumn(i)]); + public double GetDouble(int i) => Convert.ToDouble(CurrentRow[ShiftIndexAroundIDColumn(i)]); + public Type GetFieldType(int i) => Definition.Columns[ShiftIndexAroundIDColumn(i)].NETDataType; + public float GetFloat(int i) => float.Parse(Convert.ToString(CurrentRow[ShiftIndexAroundIDColumn(i)])); + public Guid GetGuid(int i) => Guid.Parse(Convert.ToString(CurrentRow[ShiftIndexAroundIDColumn(i)])); + public short GetInt16(int i) => Convert.ToInt16(CurrentRow[ShiftIndexAroundIDColumn(i)]); + public int GetInt32(int i) => Convert.ToInt32(CurrentRow[ShiftIndexAroundIDColumn(i)]); + public long GetInt64(int i) => Convert.ToInt64(CurrentRow[ShiftIndexAroundIDColumn(i)]); + public string GetName(int i) => Definition.Columns[ShiftIndexAroundIDColumn(i)].Name; + public int GetOrdinal(string name) => Definition.Columns.FindIndex(col => col.Name == name); + public DataTable GetSchemaTable() { + throw new NotImplementedException(); + } + public string GetString(int i) => Convert.ToString(CurrentRow[ShiftIndexAroundIDColumn(i)]); + public object GetValue(int i) => CurrentRow[ShiftIndexAroundIDColumn(i)]; + + int ShiftIndexAroundIDColumn(int i) { + if (IDColumnIndex != null) { + if (i > IDColumnIndex) return i - 1; + else if (i <= IDColumnIndex) return i; + } + return i; + + } + public int GetValues(object[] values) { + values = CurrentRow as object[]; + return values.Length; + } + + public bool IsDBNull(int i) { + if (Definition.Columns[ShiftIndexAroundIDColumn(i)].AllowNulls) + return CurrentRow[ShiftIndexAroundIDColumn(i)] == null; + else + return false; + } + + public bool NextResult() { + return Rows?.Count > (ReadIndex + 1); + } + + public bool Read() { + IDColumnIndex = Definition.IDColumnIndex; + if (Rows?.Count > ReadIndex) { + CurrentRow = Rows[ReadIndex]; + ReadIndex++; + return true; + } else + return false; + } + + #region IDisposable Support + private 
bool disposedValue = false; + + protected virtual void Dispose(bool disposing) { + if (!disposedValue) { + if (disposing) { + Rows.Clear(); + Rows = null; + } + + disposedValue = true; + } + } + + public void Dispose() { + Dispose(true); + } + + public void Close() { + Dispose(); + } + #endregion + } +} diff --git a/ETLBox/Definitions/Database/TableDefinition.cs b/ETLBox/Definitions/Database/TableDefinition.cs new file mode 100644 index 00000000..9bc1159d --- /dev/null +++ b/ETLBox/Definitions/Database/TableDefinition.cs @@ -0,0 +1,39 @@ +using System.Collections.Generic; +using System.Linq; + +namespace ALE.ETLBox { + public class TableDefinition { + public string Name { get; set; } + public List Columns { get; set; } + public int? IDColumnIndex { + get { + TableColumn idCol = Columns.FirstOrDefault(col => col.IsIdentity); + if (idCol != null) + return Columns.IndexOf(idCol); + else + return null; + } + } + + public string AllColumnsWithoutIdentity => Columns.Where(col => !col.IsIdentity).AsString(); + + + public TableDefinition() { + Columns = new List(); + } + + public TableDefinition(string name) : this() { + Name = name; + } + + public TableDefinition(string name, List columns) : this(name) { + Columns = columns; + } + + public void CreateTable() { + CreateTableTask.Create(this); + } + + + } +} diff --git a/ETLBox/Definitions/Extensions/ExtensionFile.cs b/ETLBox/Definitions/Extensions/ExtensionFile.cs new file mode 100644 index 00000000..2b8c326b --- /dev/null +++ b/ETLBox/Definitions/Extensions/ExtensionFile.cs @@ -0,0 +1,57 @@ +using System.IO; +using System.Text.RegularExpressions; + +namespace ALE.ETLBox { + public class ExtensionFile { + public const string VERSIONMATCH = @"--\W*@version\W*:\W*[><=]?(\d+.\d+)\W*([VFvf][Pp]\d+)?"; + public const string SKIPNEXT = @"--\W*@skipnext\W*:\W*[Tt]rue"; + public const string FILENAMEMATCH = @"^(\w*?)_([0-9A-Za-z!%&()=+#~äöüÄÖÜß -]*).sql"; + + public string Type { get; private set; } + public string Name { get; private set; } + + public string Content { get; private set; } + + public string FileName { get; set; } + + public bool IsStagExtension => Type.ToLower() == ExtensionFileLoader.STAGEXT.ToLower(); + + public bool IsValidExtension { get; private set; } = true; + public bool HasSkipNextStatement { get; private set; } + + public ExtensionFile(string filename) { + this.FileName = filename; + + FillNameAndType(); + ReadContent(); + + CheckIfHasVersion(); + CheckIfHasSkipNext(); + } + + + + private void FillNameAndType() { + string fileName = FileName.Substring(FileName.LastIndexOf(@"\") + 1); + Match m = Regex.Match(fileName, FILENAMEMATCH); + if (m.Success) { + Type = m.Groups[1].Value; + Name = m.Groups[2].Value; + } else { + IsValidExtension = false; + } + } + + public void ReadContent() { + Content = File.ReadAllText(FileName); + } + + public void CheckIfHasVersion() { + if (!Regex.IsMatch(Content, VERSIONMATCH)) IsValidExtension = false; + } + public void CheckIfHasSkipNext() { + HasSkipNextStatement = Regex.IsMatch(Content, SKIPNEXT) ? 
true : false; + } + + } +} diff --git a/ETLBox/Definitions/Extensions/ExtensionFileLoader.cs b/ETLBox/Definitions/Extensions/ExtensionFileLoader.cs new file mode 100644 index 00000000..15e9f2c1 --- /dev/null +++ b/ETLBox/Definitions/Extensions/ExtensionFileLoader.cs @@ -0,0 +1,28 @@ +using System; +using System.Collections.Generic; +using System.IO; + +namespace ALE.ETLBox { + public class ExtensionFileLoader { + public const string STAGEXT = "STAGEXT"; + public const string FILESUFFIX = ".sql"; + + public static string ExtensionScriptsFolder = ""; + + public static bool ExistsFolder => !String.IsNullOrEmpty(ExtensionScriptsFolder); + public static List GetSTAGEXTFiles() => GetExtensionFiles(STAGEXT); + public static List GetExtensionFiles(string name) { + List result = new List(); + if (ExistsFolder) { + foreach (string fileName in Directory.GetFiles(Path.GetFullPath(ExtensionScriptsFolder), $"{name}_*{FILESUFFIX}", SearchOption.TopDirectoryOnly)) { + ExtensionFile file = new ExtensionFile(fileName); + if (fileName.ToLower().EndsWith("_example.sql")) continue; + if (file.IsValidExtension) + result.Add(file); + } + } + + return result; + } + } +} diff --git a/ETLBox/Definitions/General/ConnectionString.cs b/ETLBox/Definitions/General/ConnectionString.cs new file mode 100644 index 00000000..885bb4cd --- /dev/null +++ b/ETLBox/Definitions/General/ConnectionString.cs @@ -0,0 +1,83 @@ +using System.Text.RegularExpressions; + +namespace ALE.ETLBox { + public class ConnectionString { + static string PATTERNBEGIN = $@"(.*)("; + static string PATTERNEND = $@"\s*=\s*)(.*?)(;|$)(.*)"; + static string DATASOURCE = $@"{PATTERNBEGIN}Data Source{PATTERNEND}"; + static string INITIALCATALOG = $@"{PATTERNBEGIN}Initial Catalog{PATTERNEND}"; + static string PROVIDER = $@"{PATTERNBEGIN}Provider{PATTERNEND}"; + static string CURRENTLANGUAGE = $@"{PATTERNBEGIN}Current Language{PATTERNEND}"; + static string AUTOTRANSLATE = $@"{PATTERNBEGIN}Auto Translate{PATTERNEND}"; + + static string VALIDCONNECTIONSTRING = @"[\w\s]+=([\w\s-_.+*&%$#&!§]+|"".*? "")(;|$)"; //Attention: double quotes in Regex are quoted with double quotes + + + string _ConnectionString; + public string Value { + get { + return _ConnectionString; + } + set { + _ConnectionString = RemovePatternIfExists(value, PROVIDER, CURRENTLANGUAGE, AUTOTRANSLATE); + CatalogName = ReplaceIfMatch(value, INITIALCATALOG, "${3}"); + ServerName = ReplaceIfMatch(value, DATASOURCE, "${3}"); + } + } + + public bool IsValid { + get { + return Regex.IsMatch(_ConnectionString, VALIDCONNECTIONSTRING); + } + } + + public string ServerName { get; set; } + + public string CatalogName { get; set; } + + public ConnectionString() { + + } + public ConnectionString(string connectionString) { + this.Value = connectionString; + } + + public ConnectionString GetMasterConnection() { + return new ConnectionString(ReplaceIfMatch(Value, INITIALCATALOG, "${1}${2}master${4}${5}")); + } + + public ConnectionString GetConnectionWithoutCatalog() { + return new ConnectionString(ReplaceIfMatch(Value, INITIALCATALOG, "${1}${5}")); + } + + public ConnectionString ChangeCatalog(string dbName) { + return new ConnectionString(GetConnectionWithoutCatalog().Value + + (Value.EndsWith(";") ? 
"" : ";") + + $"Initial Catalog={dbName}"); + } + + public static implicit operator ConnectionString(string v) { + return new ConnectionString(v); + } + + public override string ToString() { + return Value; + } + + string RemovePatternIfExists(string v, params string[] patterns) { + string result = v; + foreach (string pattern in patterns) { + if (Regex.IsMatch(result, pattern)) + result = Regex.Replace(result, pattern, "${1}${5}", RegexOptions.IgnoreCase); + } + return result; + } + + string ReplaceIfMatch(string input, string pattern, string replacement) { + if (Regex.IsMatch(input, pattern, RegexOptions.IgnoreCase)) + return Regex.Replace(input, pattern, replacement, RegexOptions.IgnoreCase); + else + return input; + } + } +} diff --git a/ETLBox/Definitions/Logging/LoadProcess.cs b/ETLBox/Definitions/Logging/LoadProcess.cs new file mode 100644 index 00000000..241679d7 --- /dev/null +++ b/ETLBox/Definitions/Logging/LoadProcess.cs @@ -0,0 +1,19 @@ +using System; + +namespace ALE.ETLBox { + public class LoadProcess { + public int? LoadProcessKey { get; set; } + public DateTime StartDate { get; set; } + public DateTime? TransferCompletedDate { get; set; } + public DateTime? EndDate { get; set; } + public string ProcessName { get; set; } + public string StartMessage { get; set; } + public bool IsRunning { get; set; } + public string EndMessage { get; set; } + public bool WasSuccessful { get; set; } + public string AbortMessage { get; set; } + public bool WasAborted { get; set; } + public bool IsFinished { get; set; } + public bool IsTransferCompleted { get; set; } + } +} diff --git a/ETLBox/Definitions/Logging/LogEntry.cs b/ETLBox/Definitions/Logging/LogEntry.cs new file mode 100644 index 00000000..1c45682b --- /dev/null +++ b/ETLBox/Definitions/Logging/LogEntry.cs @@ -0,0 +1,45 @@ +using Newtonsoft.Json; +using System; +using System.Collections.Generic; +using System.Diagnostics; + +namespace ALE.ETLBox { + [DebuggerDisplay("#{LogKey} {TaskType} - {TaskAction} {LogDate}")] + public class LogEntry { + public int LogKey { get; set; } + public DateTime LogDate { get; set; } + public DateTime StartDate => LogDate; + public DateTime? EndDate {get;set;} + public string Level { get; set; } + public string Message { get; set; } + public string TaskType { get; set; } + public string TaskAction { get; set; } + public string TaskHash { get; set; } + public string Stage { get; set; } + public string Source { get; set; } + public int? 
LoadProcessKey { get; set; } + } + + [DebuggerDisplay("#{LogKey} {TaskType} {Message} - {TaskAction} {LogDate}")] + public class LogHierarchyEntry : LogEntry{ + public List Children { get; set; } + [JsonIgnore] + public LogHierarchyEntry Parent { get; set; } + public LogHierarchyEntry() { + Children = new List(); + } + public LogHierarchyEntry(LogEntry entry) : this(){ + this.LogKey = entry.LogKey; + this.LogDate = entry.LogDate; + this.EndDate = entry.EndDate; + this.Level = entry.Level; + this.Message = entry.Message; + this.TaskType = entry.TaskType; + this.TaskAction = entry.TaskAction; + this.TaskHash = entry.TaskHash; + this.Stage = entry.Stage; + this.Source = entry.Source; + this.LoadProcessKey = entry.LoadProcessKey; + } + } +} diff --git a/ETLBox/Definitions/Tasks/DbTask.cs b/ETLBox/Definitions/Tasks/DbTask.cs new file mode 100644 index 00000000..7ce63efc --- /dev/null +++ b/ETLBox/Definitions/Tasks/DbTask.cs @@ -0,0 +1,236 @@ +using System; +using System.Collections.Generic; +using System.Data; +using System.Linq; + +namespace ALE.ETLBox { + public abstract class DbTask : GenericTask { + + /* Public Properties */ + public string Sql { get; set; } + public FileConnectionManager FileConnection { get; set; } + public List> Actions { get; set; } + public Action BeforeRowReadAction { get; set; } + public Action AfterRowReadAction { get; set; } + Action InternalBeforeRowReadAction { get; set; } + Action InternalAfterRowReadAction { get; set; } + public long ReadTopX { get; set; } = long.MaxValue; + public int? RowsAffected { get; private set; } + + public bool DisableExtension { get; set; } + public string Command { + get { + if (HasSql) + return HasName ? NameAsComment + Sql : Sql; + else if (HasFileConnection) { + if (FileConnection.FileExists) + return HasName ? 
NameAsComment + FileConnection.ReadContent() : FileConnection.ReadContent(); + else { + NLogger.Warn($"Sql file was not found: {FileConnection.FileName}", TaskType, "RUN", TaskHash, ControlFlow.STAGE); + return $"SELECT 'File {FileConnection.FileName} not found'"; + } + } + else + throw new Exception("Empty command"); + } + } + + /* Internal/Private properties */ + internal bool DoSkipSql { get; private set; } + NLog.Logger NLogger { get; set; } + bool HasSql => !(String.IsNullOrWhiteSpace(Sql)); + bool HasFileConnection => FileConnection != null; + + /* Some constructors */ + public DbTask() { + NLogger = NLog.LogManager.GetLogger("ETL"); + } + + public DbTask(string name) : this() { + this.TaskName = name; + } + + public DbTask(string name, string sql) : this(name) { + this.Sql = sql; + } + + public DbTask(ITask callingTask, string sql) : this() { + TaskName = callingTask.TaskName; + TaskHash = callingTask.TaskHash; + ConnectionManager = callingTask.ConnectionManager; + TaskType = callingTask.TaskType; + DisableLogging = callingTask.DisableLogging; + this.Sql = sql; + } + + public DbTask(string name, string sql, params Action[] actions) : this(name, sql) { + Actions = actions.ToList(); + } + + public DbTask(string name, string sql, Action beforeRowReadAction, Action afterRowReadAction, params Action[] actions) : this(name, sql) { + BeforeRowReadAction = beforeRowReadAction; + AfterRowReadAction = afterRowReadAction; + Actions = actions.ToList(); + } + + public DbTask(string name, FileConnectionManager fileConnection) : this(name) { + this.FileConnection = fileConnection; + } + + /* Public methods */ + public int ExecuteNonQuery() { + using (var conn= DbConnectionManager.Clone()) { + conn.Open(); + QueryStart(); + RowsAffected = DoSkipSql ? 0 : conn.ExecuteNonQuery(Command);//DbConnectionManager.ExecuteNonQuery(Command); + QueryFinish(LogType.Rows); + } + return RowsAffected ?? 0; + } + + public object ExecuteScalar() { + object result = null; + using (var conn = DbConnectionManager.Clone()) { + conn.Open(); + QueryStart(); + result = conn.ExecuteScalar(Command); + QueryFinish(); + } + return result; + } + + public Nullable ExecuteScalar() where T : struct { + object result = ExecuteScalar(); + if (result == null || result == DBNull.Value) + return null; + else + return ((T)result); + } + + + public bool ExecuteScalarAsBool() { + int? 
result = ExecuteScalar(); + return IntToBool(result); + } + + public void ExecuteReader() { + using (var conn = DbConnectionManager.Clone()) { + conn.Open(); + QueryStart(); + IDataReader reader = conn.ExecuteReader(Command) as IDataReader; + for (int rowNr = 0; rowNr < ReadTopX; rowNr++) { + if (reader.Read()) { + InternalBeforeRowReadAction?.Invoke(); + BeforeRowReadAction?.Invoke(); + for (int i = 0; i < Actions?.Count; i++) { + if (!reader.IsDBNull(i)) { + Actions?[i]?.Invoke(reader.GetValue(i)); + } else { + Actions?[i]?.Invoke(null); + } + } + AfterRowReadAction?.Invoke(); + InternalAfterRowReadAction?.Invoke(); + } + else { + break; + } + } + reader.Close(); + QueryFinish(); + } + } + + internal List Query(Action doWithRowAction, bool saveResult = true) where T : new() { + List result = null; + if (saveResult) result = new List(); + Actions = new List>(); + T row = default(T); + foreach (var propInfo in typeof(T).GetProperties()) { + Actions.Add(colValue => propInfo.SetValue(row, colValue)); + } + InternalBeforeRowReadAction = () => row = new T(); + if (saveResult) InternalAfterRowReadAction = () => result.Add(row); + else InternalAfterRowReadAction = () => doWithRowAction(row); + ExecuteReader(); + Actions = null; + return result; + } + + public List Query() where T : new() => Query(null,true); + public void Query(Action doWithRowAction) where T : new() => Query(doWithRowAction, false); + + + public void BulkInsert(IDataReader data, IColumnMappingCollection columnMapping, string tableName) { + using (var conn = DbConnectionManager.Clone()) { + conn.Open(); + QueryStart(LogType.Bulk); + conn.BulkInsert(data, columnMapping, tableName); + RowsAffected = data.RecordsAffected; + QueryFinish(LogType.Bulk); + } + } + + + /* Private implementation & stuff */ + enum LogType { + None, + Rows, + Bulk + } + + static bool IntToBool(int? result) { + if (result != null && result > 0) + return true; + else + return false; + } + + void QueryStart(LogType logType = LogType.None) { + if (!DisableLogging) + LoggingStart(logType); + + if (!DisableExtension) + ExecuteExtension(); + } + + void QueryFinish(LogType logType = LogType.None) { + if (!DisableLogging) + LoggingEnd(logType); + } + + void LoggingStart(LogType logType) { + NLogger.Info(TaskName, TaskType, "START", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + if (logType == LogType.Bulk) + NLogger.Debug($"SQL Bulk Insert", TaskType, "RUN", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + else + NLogger.Debug($"{Command}", TaskType, "RUN", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + void LoggingEnd(LogType logType) { + NLogger.Info(TaskName, TaskType, "END", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + if (logType == LogType.Rows) + NLogger.Debug($"Rows affected: {RowsAffected ?? 
0}", TaskType, "RUN", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + void ExecuteExtension() { + if (ExtensionFileLoader.ExistsFolder && HasName) { + List extFiles = ExtensionFileLoader.GetExtensionFiles(TaskHash); + + if (extFiles.Count > 0) { + foreach (var extFile in extFiles) { + new SqlTask($"Extensions: {extFile.Name}", new FileConnectionManager(extFile.FileName)) { + ConnectionManager = this.ConnectionManager, + DisableExtension = true + }.ExecuteNonQuery(); + } + DoSkipSql = extFiles.Any(ef => ef.HasSkipNextStatement); + } + } + } + + + } + + +} diff --git a/ETLBox/Definitions/Tasks/GenericTask.cs b/ETLBox/Definitions/Tasks/GenericTask.cs new file mode 100644 index 00000000..66f2dcca --- /dev/null +++ b/ETLBox/Definitions/Tasks/GenericTask.cs @@ -0,0 +1,84 @@ +using System; + +namespace ALE.ETLBox { + public abstract class GenericTask : ITask { + public virtual string TaskType { get; set; } = "N/A"; + public virtual string TaskName { get; set; } = "N/A"; + public virtual void Execute() { + throw new Exception("Not implemented!"); + } + + public virtual IConnectionManager ConnectionManager { get; set; } + //IConnectionManager _connectionManager; + //public IConnectionManager ConnectionManager { + // get { + // if (_connectionManager == null && ControlFlow.CurrentDbConnection != null) + // return ControlFlow.CurrentDbConnection; + // else + // return _connectionManager; + // } + // set { + // _connectionManager = value; + // } + //} + internal virtual IDbConnectionManager DbConnectionManager { + get { + if (ConnectionManager == null) { + if (UseAdomdConnection && ControlFlow.CurrentAdomdConnection != null) + return (IDbConnectionManager)ControlFlow.CurrentAdomdConnection; + if (ControlFlow.CurrentDbConnection != null) + return (IDbConnectionManager)ControlFlow.CurrentDbConnection; + else + return null; + } + else + return (IDbConnectionManager)ConnectionManager; + } + } + + internal virtual ICubeConnectionManager ASConnectionManager { + get { + if (ConnectionManager == null) { + if (ControlFlow.CurrentASConnection != null) + return ControlFlow.CurrentASConnection as ICubeConnectionManager; + else + return null; + } + return ConnectionManager as ICubeConnectionManager; + } + } + + public bool _disableLogging; + public virtual bool DisableLogging { + get { + if (ControlFlow.DisableAllLogging == false) + return _disableLogging; + else + return ControlFlow.DisableAllLogging; + } + set { + _disableLogging = value; + } + } + + private string _taskHash; + public virtual string TaskHash { + get { + if (_taskHash == null) + return HashHelper.Encrypt_Char40(this); + else + return _taskHash; + } + set { + _taskHash = value; + } + } + internal virtual bool HasName => !String.IsNullOrWhiteSpace(TaskName); + internal virtual string NameAsComment => CommentStart + TaskName + CommentEnd + Environment.NewLine; + private string CommentStart => DoXMLCommentStyle ? 
@"" : "*/"; + public virtual bool DoXMLCommentStyle { get; set; } + internal virtual bool UseAdomdConnection { get; set; } + + } +} diff --git a/ETLBox/Definitions/Tasks/ITask.cs b/ETLBox/Definitions/Tasks/ITask.cs new file mode 100644 index 00000000..a213cd7c --- /dev/null +++ b/ETLBox/Definitions/Tasks/ITask.cs @@ -0,0 +1,11 @@ +namespace ALE.ETLBox { + public interface ITask + { + string TaskName { get; } + string TaskType { get; } + string TaskHash { get; } + IConnectionManager ConnectionManager { get; } + bool DisableLogging { get; } + void Execute(); + } +} diff --git a/ETLBox/ETLBox.csproj b/ETLBox/ETLBox.csproj new file mode 100644 index 00000000..7485225e --- /dev/null +++ b/ETLBox/ETLBox.csproj @@ -0,0 +1,174 @@ + + + + + Debug + AnyCPU + {0C7548BA-A597-488A-8ADD-2F628BE691D6} + Library + Properties + ALE.ETLBox + ETLBox + v4.6.1 + 512 + + + + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + + + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + + ..\packages\CsvHelper.2.16.3.0\lib\net45\CsvHelper.dll + + + False + ..\Libs\Microsoft.AnalysisServices.dll + + + False + ..\Libs\Microsoft.AnalysisServices.AdomdClient.dll + + + False + ..\Libs\Microsoft.AnalysisServices.Core.dll + + + False + ..\Libs\Microsoft.SqlServer.ConnectionInfo.dll + + + False + ..\Libs\Microsoft.SqlServer.Smo.dll + + + ..\packages\Newtonsoft.Json.11.0.2\lib\net45\Newtonsoft.Json.dll + True + + + ..\packages\NLog.4.4.13\lib\net45\NLog.dll + + + + + ..\packages\Microsoft.Tpl.Dataflow.4.5.24\lib\portable-net45+win8+wpa81\System.Threading.Tasks.Dataflow.dll + True + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/ETLBox/Helper/DataTypeConverter.cs b/ETLBox/Helper/DataTypeConverter.cs new file mode 100644 index 00000000..6c8f7f23 --- /dev/null +++ b/ETLBox/Helper/DataTypeConverter.cs @@ -0,0 +1,60 @@ +using System; +using System.Text.RegularExpressions; + +namespace ALE.ETLBox { + public class DataTypeConverter { + public const int DefaultTinyIntegerLength = 5; + public const int DefaultSmallIntegerLength = 7; + public const int DefaultIntegerLength = 11; + public const int DefaultBigIntegerLength = 21; + public const int DefaultDateTime2Length = 41; + public const int DefaultDateTimeLength = 27; + public const int DefaultDecimalLength = 41; + + public const string _REGEX = @"(.*?)char\((\d*)\)(.*?)"; + + public static int GetTypeLength(string dataTypeString) { + switch (dataTypeString) { + case "tinyint": return DefaultTinyIntegerLength; + case "smallint": return DefaultSmallIntegerLength; + case "int": return DefaultIntegerLength; + case "bigint": return DefaultBigIntegerLength; + case "decimal": return DefaultDecimalLength; + case "datetime": return DefaultDateTimeLength; + case "datetime2": return DefaultDateTime2Length; + default: + if (IsCharTypeDefinition(dataTypeString)) + return GetStringLengthFromCharString(dataTypeString); + else + throw new Exception("Unknown data type"); + } + } + + public static bool IsCharTypeDefinition(string value) { + return new Regex(_REGEX).IsMatch(value); + } + + public static int GetStringLengthFromCharString(string value) { + string result = Regex.Replace(value, _REGEX, "${2}"); + return int.Parse(result); + } + + public static string GetObjectTypeString(string dataTypeString) { + switch (dataTypeString) { + + case "tinyint": return "System.UInt16"; + case "smallint": 
return "System.Int16"; + case "int": return "System.Int32"; + case "bigint": return "System.Int64"; + case "decimal": return "System.Decimal"; + case "datetime": return "System.DateTime"; + case "datetime2": return "System.DateTime"; + default: return "System.String"; + } + } + + public static Type GetTypeObject(string dataTypeString) { + return Type.GetType(GetObjectTypeString(dataTypeString)); + } + } +} diff --git a/ETLBox/Helper/HashHelper.cs b/ETLBox/Helper/HashHelper.cs new file mode 100644 index 00000000..78780827 --- /dev/null +++ b/ETLBox/Helper/HashHelper.cs @@ -0,0 +1,22 @@ +using System.Security.Cryptography; +using System.Text; + + +namespace ALE.ETLBox { + public static class HashHelper { + public static string Encrypt_Char40(string text) { + if (text != null) { + string hex = ""; + byte[] hashValue = new SHA1Managed().ComputeHash(Encoding.UTF8.GetBytes(text)); + foreach (byte hashByte in hashValue) + hex += hashByte.ToString("x2"); + return hex.ToUpper(); + } + else + return ""; + } + + public static string Encrypt_Char40(ITask task) => Encrypt_Char40(task.TaskName + "|" + task.TaskType); + public static string Encrypt_Char40(ITask task, string id) => Encrypt_Char40(task.TaskName + "|" + task.TaskType + "|" + id); + } +} diff --git a/ETLBox/Helper/RuntimePolicyHelper.cs b/ETLBox/Helper/RuntimePolicyHelper.cs new file mode 100644 index 00000000..934532fd --- /dev/null +++ b/ETLBox/Helper/RuntimePolicyHelper.cs @@ -0,0 +1,66 @@ +using System; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; +//using Microsoft.AnalysisServices.Tabular; +//using Microsoft.AnalysisServices.Xmla; + +namespace ALE.ETLBox { + public static class RuntimePolicyHelper { + public static bool LegacyV2RuntimeEnabledSuccessfully { get; private set; } + + //public static ManagedBatchParser.Parser Parser_OnlyToIncludeBatchParserDLL { get; set; } + //public static Microsoft.AnalysisServices.Xmla.XmlaClient XMLAClient_OnlyToIncludeDLL { get; set; } + //public static Microsoft.AnalysisServices.Tabular.Culture Culture_OnlyToIncludeDLL { get; set; } + + public static void SetNET20Compatibilty() { + //IncludeLibrariesInBuildButNotUsedInCode(); + if (!RuntimePolicyHelper.LegacyV2RuntimeEnabledSuccessfully) + throw new Exception("Could not load SMO (compatibility level could not be set to .net 2.0"); + } + + //private static void IncludeLibrariesInBuildButNotUsedInCode() + //{ + // Parser_OnlyToIncludeBatchParserDLL = new ManagedBatchParser.Parser(); + // Parser_OnlyToIncludeBatchParserDLL = null; + // XMLAClient_OnlyToIncludeDLL = new XmlaClient(); + // XMLAClient_OnlyToIncludeDLL = null; + // Culture_OnlyToIncludeDLL = new Culture(); + // Culture_OnlyToIncludeDLL = null; + //} + + static RuntimePolicyHelper() { + ICLRRuntimeInfo clrRuntimeInfo = + (ICLRRuntimeInfo)RuntimeEnvironment.GetRuntimeInterfaceAsObject( + Guid.Empty, + typeof(ICLRRuntimeInfo).GUID); + try { + clrRuntimeInfo.BindAsLegacyV2Runtime(); + LegacyV2RuntimeEnabledSuccessfully = true; + } + catch (COMException) { + // This occurs with an HRESULT meaning + // "A different runtime was already bound to the legacy CLR version 2 activation policy." 
+ LegacyV2RuntimeEnabledSuccessfully = false; + } + } + + [ComImport] + [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] + [Guid("BD39D1D2-BA2F-486A-89B0-B4B0CB466891")] + private interface ICLRRuntimeInfo { + void xGetVersionString(); + void xGetRuntimeDirectory(); + void xIsLoaded(); + void xIsLoadable(); + void xLoadErrorString(); + void xLoadLibrary(); + void xGetProcAddress(); + void xGetInterface(); + void xSetDefaultStartupFlags(); + void xGetDefaultStartupFlags(); + + [MethodImpl(MethodImplOptions.InternalCall, MethodCodeType = MethodCodeType.Runtime)] + void BindAsLegacyV2Runtime(); + } + } +} diff --git a/ETLBox/Helper/SqlHelper.cs b/ETLBox/Helper/SqlHelper.cs new file mode 100644 index 00000000..3a8ebc6e --- /dev/null +++ b/ETLBox/Helper/SqlHelper.cs @@ -0,0 +1,58 @@ +using System; + +namespace ALE.ETLBox { + public class SqlHelper { + public static string Headline1(string headline) { + return string.Format(@" +------------------------------------------------------------ +-- {0} +------------------------------------------------------------ +" + , headline.Replace(Environment.NewLine, Environment.NewLine + "--")); + } + + public static string Headline2(string headline) { + return string.Format(@" +/*** +{0} +***/ +", headline); + } + + public static string Comment(string comment) { + return string.Format(@"-- {0}", comment); + } + + + + public static string DropTablesIfExists(params string[] tableNames) { + string sql = ""; + foreach (string name in tableNames) { + string objectName = name.StartsWith("#") ? "tempdb.." + name: name; + sql += $@"if object_id('{objectName}') is not null drop table {name}" + Environment.NewLine; + } + return sql; + } + + + + public static string Sequence(string sequenceName, string schema, int increment, string tableName, string keyName) { + string sequence = ""; + sequence = $@"declare @MaxID{sequenceName} bigint +declare @sql{sequenceName} nvarchar(4000) +if object_id('{schema}.{sequenceName}') is not null drop sequence {schema}.{sequenceName} +select @MaxID{sequenceName} = isnull(max({keyName}),0) from {tableName} +set @sql{sequenceName} = N'create sequence {schema}.{sequenceName} + start with ' + cast((@MaxID{sequenceName}+{increment}) as nvarchar(50)) +' + increment by {increment};' +execute sp_executesql @sql{sequenceName} +"; + + return sequence; + } + + + + + } +} diff --git a/ETLBox/Helper/SqlHelperExtensions.cs b/ETLBox/Helper/SqlHelperExtensions.cs new file mode 100644 index 00000000..30369d3e --- /dev/null +++ b/ETLBox/Helper/SqlHelperExtensions.cs @@ -0,0 +1,16 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace ALE.ETLBox { + public static class SqlHelperExtensions { + public static string AsString(this ITableColumn column, string tblName = "", string prefix = "", string suffix = "") => + (tblName != "" ? tblName + "." 
: "") + "[" + prefix + column.Name + suffix + "]"; + + public static string AsString(this IEnumerable columns, string tblName = "", string prefix = "", string suffix = "") => + string.Join(", ", columns.Select(col => col.AsString(tblName, prefix, suffix))); + + public static string AsStringWithNewLine(this IEnumerable columns, string tblName = "", string prefix = "", string suffix = "") => + string.Join(Environment.NewLine + ",", columns.Select(col => col.AsString(tblName, prefix, suffix))); + } +} diff --git a/ETLBox/Helper/StringExtension.cs b/ETLBox/Helper/StringExtension.cs new file mode 100644 index 00000000..3a66ec64 --- /dev/null +++ b/ETLBox/Helper/StringExtension.cs @@ -0,0 +1,5 @@ +namespace ALE.ETLBox { + public static class StringExtension { + public static string NullOrSqlString(this string s) => s == null ? "null" : $"'{s.Replace("'","''")}'"; + } +} diff --git a/ETLBox/NLog/ETLLogLayoutRenderer.cs b/ETLBox/NLog/ETLLogLayoutRenderer.cs new file mode 100644 index 00000000..a9774706 --- /dev/null +++ b/ETLBox/NLog/ETLLogLayoutRenderer.cs @@ -0,0 +1,28 @@ +using NLog; +using NLog.Config; +using NLog.LayoutRenderers; +using System.Text; + +namespace ALE.ETLBox { + [LayoutRenderer("etllog")] + public class ETLLogLayoutRenderer : LayoutRenderer { + [DefaultParameter] + public string LogType { get; set; } = "message"; + + protected override void Append(StringBuilder builder, LogEventInfo logEvent) { + if (LogType?.ToLower() == "message") + builder.Append(logEvent.Message); + else if (LogType?.ToLower() == "type" && logEvent?.Parameters?.Length >= 1) + builder.Append(logEvent.Parameters[0]); + else if (LogType?.ToLower() == "action" && logEvent?.Parameters?.Length >= 2) + builder.Append(logEvent.Parameters[1]); + else if (LogType?.ToLower() == "hash" && logEvent?.Parameters?.Length >= 3) + builder.Append(logEvent.Parameters[2]); + else if (LogType?.ToLower() == "stage" && logEvent?.Parameters?.Length >= 4) + builder.Append(logEvent.Parameters[3]); + else if (LogType?.ToLower() == "loadprocesskey" && logEvent?.Parameters?.Length >= 5) + builder.Append(logEvent.Parameters[4]); + } + + } +} diff --git a/ETLBox/Properties/AssemblyInfo.cs b/ETLBox/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..4dc687b2 --- /dev/null +++ b/ETLBox/Properties/AssemblyInfo.cs @@ -0,0 +1,35 @@ +using System.Reflection; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("ETLBox")] +[assembly: AssemblyDescription("It's all in the box! Run all your ETL jobs with this awesome C# class library.")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("ALE")] +[assembly: AssemblyProduct("ETLBox")] +[assembly: AssemblyCopyright("Copyright © Andreas Lennartz 2018")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. 
+[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("0c7548ba-a597-488a-8add-2f628be691d6")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("0.2.0.0")] +[assembly: AssemblyFileVersion("0.2.0.0")] diff --git a/ETLBox/Toolbox/ConnectionManager/ASConnectionManager.cs b/ETLBox/Toolbox/ConnectionManager/ASConnectionManager.cs new file mode 100644 index 00000000..896d30ea --- /dev/null +++ b/ETLBox/Toolbox/ConnectionManager/ASConnectionManager.cs @@ -0,0 +1,122 @@ +using System; +using AS = Microsoft.AnalysisServices; + +namespace ALE.ETLBox { + public class ASConnectionManager : ICubeConnectionManager, IDisposable { + public ConnectionString ConnectionString { get; set; } + public bool DoParallelProcessing { get; set; } = true; + public bool IsConnectionOpen => Server?.Connected ?? false; + public string ErrorMessages { get; set; } = ""; + public bool IgnoreErrors { get; set; } + + public ASConnectionManager() { + Server = new AS.Server(); + } + + public ASConnectionManager(ConnectionString connectionString) : this() { + this.ConnectionString = connectionString; + } + + public void Open() { + if (!IsConnectionOpen) { + Server = new AS.Server(); + Server.Connect(ConnectionString.GetConnectionWithoutCatalog().Value); + + } + } + + public void Process() { + //Open(); + FindDatabase(); + //TODO ErrorConfiguration has no effect when capture log is enabled!!! The workaround is to do a "normal processing" (always sequential?) 
+ if (IgnoreErrors) + Database?.Process(AS.ProcessType.ProcessFull, GetErrorConfiguration()); + else { + EnabledCaptureLog(); + Database?.Process(AS.ProcessType.ProcessFull); + ExecuteAndCheckErrorLog(); + } + } + + public void DropIfExists() { + //Open(); + FindDatabase(); + Database?.Drop(AS.DropOptions.IgnoreFailures); + } + + internal AS.Server Server { get; set; } + internal AS.Database Database { get; set; } + internal AS.Cube Cube { get; set; } + + void FindDatabase() { + Database = Server.Databases.FindByName(ConnectionString.CatalogName); + if (Database == null) + ErrorMessages += $"Can't find cube database with name {ConnectionString.CatalogName}" + Environment.NewLine; + } + + void EnabledCaptureLog() { + Server.CaptureXml = true; + } + + void ExecuteAndCheckErrorLog() { + AS.XmlaResultCollection resultCol = Server.ExecuteCaptureLog(true, DoParallelProcessing, true); + if (resultCol.ContainsErrors) { + ErrorMessages += $"Errors occured in cube {ConnectionString.CatalogName}:" + Environment.NewLine; + foreach (AS.XmlaResult result in resultCol) { + foreach (object error in result.Messages) { + if (error.GetType() == typeof(AS.XmlaError)) + ErrorMessages += "ERR: " + ((AS.XmlaError)error).Description + Environment.NewLine; + else if (error.GetType() == typeof(AS.XmlaWarning)) + ErrorMessages += "WARN: " + ((AS.XmlaWarning)error).Description + Environment.NewLine; + } + } + throw new Exception(ErrorMessages); + } + } + + AS.ErrorConfiguration GetErrorConfiguration() { + AS.ErrorConfiguration err = new AS.ErrorConfiguration(); + err.KeyErrorAction = AS.KeyErrorAction.DiscardRecord; + err.KeyErrorLimitAction = AS.KeyErrorLimitAction.StopLogging; + err.CalculationError = AS.ErrorOption.IgnoreError; + err.KeyDuplicate = AS.ErrorOption.IgnoreError; + err.KeyNotFound = AS.ErrorOption.IgnoreError; + err.NullKeyConvertedToUnknown = AS.ErrorOption.IgnoreError; + err.NullKeyNotAllowed = AS.ErrorOption.IgnoreError; + err.KeyErrorLimit = -1; + return err; + + } + + + #region IDisposable Support + private bool disposedValue = false; // To detect redundant calls + + protected void Dispose(bool disposing) { + if (!disposedValue) { + if (disposing) { + if (Server != null) + Server.Dispose(); + Server = null; + } + disposedValue = true; + } + } + + public void Dispose() => Dispose(true); + public void Close() => Dispose(); + + #endregion + + public ICubeConnectionManager Clone() { + ASConnectionManager clone = new ASConnectionManager(ConnectionString) { + DoParallelProcessing = this.DoParallelProcessing, + IgnoreErrors = this.IgnoreErrors + }; + return clone; + + } + + + } +} diff --git a/ETLBox/Toolbox/ConnectionManager/AdomdConnectionManager.cs b/ETLBox/Toolbox/ConnectionManager/AdomdConnectionManager.cs new file mode 100644 index 00000000..666b6f0a --- /dev/null +++ b/ETLBox/Toolbox/ConnectionManager/AdomdConnectionManager.cs @@ -0,0 +1,25 @@ +using Microsoft.AnalysisServices.AdomdClient; +using System; +using System.Data; + +namespace ALE.ETLBox { + public class AdomdConnectionManager : DbConnectionManager { + + public AdomdConnectionManager() : base() { } + + public AdomdConnectionManager(ConnectionString connectionString) : base(connectionString) { } + + public override void BulkInsert(IDataReader data, IColumnMappingCollection columnMapping, string tableName) { + throw new NotImplementedException(); + } + + public override IDbConnectionManager Clone() { + AdomdConnectionManager clone = new AdomdConnectionManager(ConnectionString) { + MaxLoginAttempts = this.MaxLoginAttempts + }; + return 
clone; + + } + + } +} diff --git a/ETLBox/Toolbox/ConnectionManager/FileConnectionManager.cs b/ETLBox/Toolbox/ConnectionManager/FileConnectionManager.cs new file mode 100644 index 00000000..8705df1e --- /dev/null +++ b/ETLBox/Toolbox/ConnectionManager/FileConnectionManager.cs @@ -0,0 +1,38 @@ +using System; +using System.IO; + +namespace ALE.ETLBox { + public class FileConnectionManager { + internal string FileName { get; set; } + + internal string Path { get; set; } + + internal string FullFileName { + get { + if (HasPath) + return System.IO.Path.Combine(Path, FileName); + else + return FileName; + } + } + + internal bool HasPath => !(String.IsNullOrWhiteSpace(Path)); + + public bool FileExists => File.Exists(FullFileName); + + public FileConnectionManager(string filename) { + this.FileName = filename; + } + + public FileConnectionManager(string path, string filename) : this(filename) { + this.Path = path; + } + + internal string ReadContent() { + if (FileExists) + return File.ReadAllText(FullFileName); + else + return String.Empty; + } + } +} diff --git a/ETLBox/Toolbox/ConnectionManager/SMOConnectionManager.cs b/ETLBox/Toolbox/ConnectionManager/SMOConnectionManager.cs new file mode 100644 index 00000000..f55ce070 --- /dev/null +++ b/ETLBox/Toolbox/ConnectionManager/SMOConnectionManager.cs @@ -0,0 +1,77 @@ +using Microsoft.SqlServer.Management.Common; +using Microsoft.SqlServer.Management.Smo; +using System; +using System.Data; + +namespace ALE.ETLBox { + public class SMOConnectionManager : IDbConnectionManager, IDisposable { + public ConnectionString ConnectionString { get; set; } + public bool IsConnectionOpen => SqlConnectionManager.DbConnection?.State == ConnectionState.Open; + + public SMOConnectionManager(ConnectionString connectionString) { + RuntimePolicyHelper.SetNET20Compatibilty(); + ConnectionString = connectionString; + SqlConnectionManager = new SqlConnectionManager(connectionString); + } + + internal Server Server { get; set; } + internal ServerConnection Context => Server.ConnectionContext; + internal SqlConnectionManager SqlConnectionManager { get; set; } + internal ServerConnection OpenedContext { + get { + if (!IsConnectionOpen) + Open(); + return Context; + } + } + + public void Open() { + SqlConnectionManager = new SqlConnectionManager(ConnectionString); + SqlConnectionManager.Open(); + Server = new Server(new ServerConnection(SqlConnectionManager.DbConnection)); + Context.StatementTimeout = 0; + } + + public int ExecuteNonQuery(string command) { + return OpenedContext.ExecuteNonQuery(command); + } + + public object ExecuteScalar(string command) { + return OpenedContext.ExecuteScalar(command); + } + + public IDataReader ExecuteReader(string command) { + return OpenedContext.ExecuteReader(command); + } + + public void BulkInsert(IDataReader data, IColumnMappingCollection columnMapping, string tableName) + => SqlConnectionManager.BulkInsert(data, columnMapping, tableName); + + private bool disposedValue = false; // To detect redundant calls + protected void Dispose(bool disposing) { + if (!disposedValue) { + if (disposing) { + Server?.ConnectionContext?.Disconnect(); + if (SqlConnectionManager != null) + SqlConnectionManager.Close(); + SqlConnectionManager = null; + Server = null; + } + disposedValue = true; + } + } + + public void Dispose() => Dispose(true); + public void Close() => Dispose(); + + public IDbConnectionManager Clone() { + SMOConnectionManager clone = new SMOConnectionManager(ConnectionString) { }; + return clone; + } + + + } + + + +} diff --git 
a/ETLBox/Toolbox/ConnectionManager/SqlConnectionManager.cs b/ETLBox/Toolbox/ConnectionManager/SqlConnectionManager.cs
new file mode 100644
index 00000000..96009c18
--- /dev/null
+++ b/ETLBox/Toolbox/ConnectionManager/SqlConnectionManager.cs
@@ -0,0 +1,30 @@
+using System.Data;
+using System.Data.SqlClient;
+
+namespace ALE.ETLBox {
+    public class SqlConnectionManager : DbConnectionManager<SqlConnection, SqlCommand> {
+
+        public SqlConnectionManager() : base() { }
+
+        public SqlConnectionManager(ConnectionString connectionString) : base(connectionString) { }
+
+        public override void BulkInsert(IDataReader data, IColumnMappingCollection columnMapping, string tableName) {
+            using (SqlBulkCopy bulkCopy = new SqlBulkCopy(DbConnection, SqlBulkCopyOptions.TableLock, null)) {
+                bulkCopy.BulkCopyTimeout = 0;
+                bulkCopy.DestinationTableName = tableName;
+                foreach (IColumnMapping colMap in columnMapping)
+                    bulkCopy.ColumnMappings.Add(colMap.SourceColumn, colMap.DataSetColumn);
+                bulkCopy.WriteToServer(data);
+            }
+        }
+
+        public override IDbConnectionManager Clone() {
+            SqlConnectionManager clone = new SqlConnectionManager(ConnectionString) {
+                MaxLoginAttempts = this.MaxLoginAttempts
+            };
+            return clone;
+        }
+    }
+}
diff --git a/ETLBox/Toolbox/ControlFlow/ControlFlow.cs b/ETLBox/Toolbox/ControlFlow/ControlFlow.cs
new file mode 100644
index 00000000..790f2f9b
--- /dev/null
+++ b/ETLBox/Toolbox/ControlFlow/ControlFlow.cs
@@ -0,0 +1,46 @@
+using NLog;
+//using ALE.SchemaReference;
+using System.Linq;
+
+namespace ALE.ETLBox {
+    public static class ControlFlow {
+        public static string STAGE { get; set; }
+
+        static IDbConnectionManager _currentDbConnection;
+        public static IDbConnectionManager CurrentDbConnection {
+            get {
+                return _currentDbConnection;
+            }
+            set {
+                _currentDbConnection = value;
+                if (value != null)
+                    SetLoggingDatabase(value);
+            }
+        }
+        public static AdomdConnectionManager CurrentAdomdConnection { get; set; }
+        public static ASConnectionManager CurrentASConnection { get; set; }
+
+        public static LoadProcess CurrentLoadProcess { get; internal set; }
+
+        public static bool DisableAllLogging { get; set; }
+        static ControlFlow() {
+            NLog.Config.ConfigurationItemFactory.Default.LayoutRenderers.RegisterDefinition("etllog", typeof(ETLLogLayoutRenderer));
+        }
+
+        // Points the NLog database target (if one is configured) at the given connection so that
+        // ETL log entries are written into the same database.
+        public static void SetLoggingDatabase(IConnectionManager connection) {
+            var dbTarget = LogManager.Configuration?.ConfiguredNamedTargets?.Where(t => t.GetType() == typeof(NLog.Targets.DatabaseTarget)).FirstOrDefault() as NLog.Targets.DatabaseTarget;
+            if (dbTarget != null)
+                dbTarget.ConnectionString = connection.ConnectionString.Value; //??
CurrentDbConnection.ConnectionString.Value; //""; Parameter.DWHConnection?.Value; + } + + public static void ClearSettings() { + CurrentDbConnection = null; + CurrentAdomdConnection = null; + CurrentASConnection = null; + CurrentLoadProcess = null; + DisableAllLogging = false; + } + + } +} diff --git a/ETLBox/Toolbox/ControlFlow/CustomTask.cs b/ETLBox/Toolbox/ControlFlow/CustomTask.cs new file mode 100644 index 00000000..8f5fee4b --- /dev/null +++ b/ETLBox/Toolbox/ControlFlow/CustomTask.cs @@ -0,0 +1,59 @@ +using System; + +namespace ALE.ETLBox { + public class CustomTask : GenericTask, ITask { + /* ITask interface */ + public override string TaskType { get; set; } = "CUSTOM"; + public override string TaskName { get; set; } + public new void Execute() { + throw new Exception("A custom task can't be used without an Action!"); + } + + public CustomTask(string name) { + NLogger = NLog.LogManager.GetLogger("ETL"); + this.TaskName = name; + } + + + public void Execute(Action task) { + NLogStart(); + task.Invoke(); + NLogFinish(); + } + + public void Execute(Action task, t1 param1) { + NLogStart(); + task.Invoke(param1); + NLogFinish(); + } + + public void Execute(Action task, t1 param1, t2 param2) { + NLogStart(); + task.Invoke(param1, param2); + NLogFinish(); + } + + public static void Execute(string name, Action task) => + new CustomTask(name).Execute(task); + + public static void Execute(string name, Action task, t1 param1) => + new CustomTask(name).Execute(task, param1); + + public static void Execute(string name, Action task, t1 param1, t2 param2) => + new CustomTask(name).Execute(task, param1, param2); + + NLog.Logger NLogger { get; set; } + + void NLogStart() { + if (!DisableLogging) + NLogger.Info(TaskName, TaskType, "START", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + void NLogFinish() { + if (!DisableLogging) + NLogger.Info(TaskName, TaskType, "END", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + + } +} \ No newline at end of file diff --git a/ETLBox/Toolbox/ControlFlow/Package.cs b/ETLBox/Toolbox/ControlFlow/Package.cs new file mode 100644 index 00000000..999570a4 --- /dev/null +++ b/ETLBox/Toolbox/ControlFlow/Package.cs @@ -0,0 +1,22 @@ +using System; + +namespace ALE.ETLBox { + public class Package : GenericTask, ITask { + public override string TaskType { get; set; } = "PACKAGE"; + public override string TaskName { get; set; } = "Package"; + public override void Execute() => new CustomTask(TaskName) { TaskType = this.TaskType, TaskHash = this.TaskHash }.Execute(Tasks); + public Action Tasks { get; set; } + + public Package() { } + + public Package(string name) : this() { + TaskName = name; + } + + public Package(string name, Action tasks) : this(name) { + this.Tasks = tasks; + } + + public static void Execute(string name, Action tasks) => new Package(name, tasks).Execute(); + } +} diff --git a/ETLBox/Toolbox/ControlFlow/Sequence.cs b/ETLBox/Toolbox/ControlFlow/Sequence.cs new file mode 100644 index 00000000..27681fed --- /dev/null +++ b/ETLBox/Toolbox/ControlFlow/Sequence.cs @@ -0,0 +1,43 @@ +using System; + +namespace ALE.ETLBox { + public class Sequence : GenericTask, ITask + { + public override string TaskType { get; set; } = "SEQUENCE"; + public override string TaskName { get; set; } = "Sequence"; + public override void Execute() => new CustomTask(TaskName) { TaskType = this.TaskType, TaskHash = this.TaskHash }.Execute(Tasks); + public Action Tasks { get; set; } + + public Sequence() { + + } + + 
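+        // Editor's note - an illustrative usage sketch, not part of this changeset. Packages,
+        // sequences and custom tasks compose by nesting plain Action delegates; the task and
+        // stage names below are hypothetical:
+        //
+        //   Package.Execute("Nightly load", () => {
+        //       Sequence.Execute("Stage", () => CustomTask.Execute("Load file", () => Console.WriteLine("loading")));
+        //       CustomTask.Execute("Notify", () => Console.WriteLine("done"));
+        //   });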
public Sequence(string name) : this() { + TaskName = name; + } + + public Sequence(string name, Action tasks) : this(name) { + this.Tasks = tasks; + } + + public static void Execute(string name, Action tasks) => new Sequence(name, tasks).Execute(); + } + + public class Sequence : Sequence { + public T Parent { get; set; } + public new Action Tasks { get; set; } + public Sequence() :base(){ + + } + + public Sequence(string name) : base(name) { } + public Sequence(string name, Action tasks, T parent) : base(name) { + this.Tasks = tasks; + this.Parent = parent; + } + + public override void Execute() => new CustomTask(TaskName).Execute(Tasks, Parent); + + public static void Execute(string name, Action tasks, T parent) => new Sequence(name, tasks, parent).Execute(); + } +} diff --git a/ETLBox/Toolbox/Cube/DropCubeTask.cs b/ETLBox/Toolbox/Cube/DropCubeTask.cs new file mode 100644 index 00000000..d6dfb642 --- /dev/null +++ b/ETLBox/Toolbox/Cube/DropCubeTask.cs @@ -0,0 +1,26 @@ +namespace ALE.ETLBox { + public class DropCubeTask : GenericTask, ITask { + public override string TaskType { get; set; } = "DROPCUBE"; + public override string TaskName => $"Drops cube {ASConnectionManager.ConnectionString.CatalogName}"; + public override void Execute() { + NLogger.Info(TaskName, TaskType, "START", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + using (var conn = ASConnectionManager.Clone()) { + conn.Open(); + conn.DropIfExists(); + } + NLogger.Info(TaskName, TaskType, "END", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + public DropCubeTask() { + NLogger = NLog.LogManager.GetLogger("ETL"); + } + + public DropCubeTask(string name) : this() { + this.TaskName = name; + } + + public static void Execute(string name) => new DropCubeTask(name).Execute(); + + NLog.Logger NLogger { get; set; } + + } +} diff --git a/ETLBox/Toolbox/Cube/ProcessCubeTask.cs b/ETLBox/Toolbox/Cube/ProcessCubeTask.cs new file mode 100644 index 00000000..93d11293 --- /dev/null +++ b/ETLBox/Toolbox/Cube/ProcessCubeTask.cs @@ -0,0 +1,29 @@ +namespace ALE.ETLBox { + public class ProcessCubeTask : GenericTask, ITask + { + public override string TaskType { get; set; } = "PROCESSCUBE"; + public override string TaskName => $"Process cube {ASConnectionManager.ConnectionString.CatalogName}"; + public override void Execute() { + NLogger.Info(TaskName, TaskType, "START", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + using (var conn = ASConnectionManager.Clone()) { + conn.Open(); + conn.Process(); + } + NLogger.Info(TaskName, TaskType, "END", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + public ProcessCubeTask() + { + NLogger = NLog.LogManager.GetLogger("ETL"); + } + + public ProcessCubeTask(string name) : this() + { + this.TaskName = name; + } + + public static void Process(string name) => new ProcessCubeTask(name).Execute(); + + NLog.Logger NLogger { get; set; } + } +} diff --git a/ETLBox/Toolbox/Cube/XmlaTask.cs b/ETLBox/Toolbox/Cube/XmlaTask.cs new file mode 100644 index 00000000..28db3fab --- /dev/null +++ b/ETLBox/Toolbox/Cube/XmlaTask.cs @@ -0,0 +1,61 @@ +using System; +using System.Data; +using System.Linq; + +namespace ALE.ETLBox { + public class XmlaTask : DbTask { + public override string TaskType { get; set; } = "XMLA"; + public override string TaskName { get; set; } = "Run some xmla"; + public override void Execute() => ExecuteNonQuery(); + + public XmlaTask() { + Init(); + } + + public 
XmlaTask(string name) : base(name) { + Init(); + } + + public XmlaTask(string name, FileConnectionManager fileConnection) : base(name, fileConnection) { + Init(); + } + + public XmlaTask(ITask callingTask, string sql) : base(callingTask, sql) { + Init(); + } + + public XmlaTask(string name, string sql) : base(name, sql) { + Init(); + } + + public XmlaTask(string name, string sql, params Action[] actions) : base(name, sql, actions) { + Init(); + } + + public XmlaTask(string name, string sql, Action beforeRowReadAction, Action afterRowReadAction, params Action[] actions) : base(name, sql, beforeRowReadAction, afterRowReadAction, actions) { + Init(); + } + + private void Init() { + DoXMLCommentStyle = true; + UseAdomdConnection = true; + } + + /* Static methods for convenience */ + public static int ExecuteNonQuery(string name, string sql) => new XmlaTask(name, sql).ExecuteNonQuery(); + public static int ExecuteNonQuery(string name, FileConnectionManager fileConnection) => new XmlaTask(name, fileConnection).ExecuteNonQuery(); + public static object ExecuteScalar(string name, string sql) => new XmlaTask(name, sql).ExecuteScalar(); + public static Nullable ExecuteScalar(string name, string sql) where T : struct => new XmlaTask(name, sql).ExecuteScalar(); + public static bool ExecuteScalarAsBool(string name, string sql) => new XmlaTask(name, sql).ExecuteScalarAsBool(); + public static void ExecuteReaderSingleLine(string name, string sql, params Action[] actions) => + new XmlaTask(name, sql, actions) { ReadTopX = 1 }.ExecuteReader(); + public static void ExecuteReader(string name, string sql, params Action[] actions) => new XmlaTask(name, sql, actions).ExecuteReader(); + public static void ExecuteReader(string name, string sql, Action beforeRowReadAction, Action afterRowReadAction, params Action[] actions) => + new XmlaTask(name, sql, beforeRowReadAction, afterRowReadAction, actions).ExecuteReader(); + public static void ExecuteReader(string name, FileConnectionManager fileConnection, Action beforeRowReadAction, Action afterRowReadAction, params Action[] actions) => + new XmlaTask(name, fileConnection) { BeforeRowReadAction = beforeRowReadAction, AfterRowReadAction = afterRowReadAction, Actions = actions.ToList() }.ExecuteReader(); + + public static void BulkInsert(string name, IDataReader data, IColumnMappingCollection columnMapping, string tableName) => + new XmlaTask(name).BulkInsert(data, columnMapping, tableName); + } +} diff --git a/ETLBox/Toolbox/DataFlow/BlockTransformation.cs b/ETLBox/Toolbox/DataFlow/BlockTransformation.cs new file mode 100644 index 00000000..6c2a43bf --- /dev/null +++ b/ETLBox/Toolbox/DataFlow/BlockTransformation.cs @@ -0,0 +1,71 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks.Dataflow; + + +namespace ALE.ETLBox { + public class BlockTransformation : GenericTask, ITask, IDataFlowLinkTarget, IDataFlowLinkSource { + /* ITask Interface */ + public override string TaskType { get; set; } = "DF_BLOCKTRANSFORMATION"; + public override string TaskName { get; set; } = "Block Transformation (unnamed)"; + public override void Execute() { throw new Exception("Transformations can't be executed directly"); } + + /* Public Properties */ + public Func, List> BlockTransformationFunc { + get { + return _blockTransformationFunc; + } + set { + _blockTransformationFunc = value; + InputBuffer = new ActionBlock(row => InputData.Add(row)); + InputBuffer.Completion.ContinueWith(t => { + InputData = BlockTransformationFunc(InputData); + WriteIntoOutput(); + 
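+                    // When the input side completes, the buffered rows are transformed in one
+                    // call and then flushed row by row into the output buffer.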
}); + + } + } + public ISourceBlock SourceBlock => OutputBuffer; + public ITargetBlock TargetBlock => InputBuffer; + + /* Private stuff */ + BufferBlock OutputBuffer { get; set; } + ActionBlock InputBuffer { get; set; } + Func, List> _blockTransformationFunc; + List InputData { get; set; } + NLog.Logger NLogger { get; set; } + public BlockTransformation() { + NLogger = NLog.LogManager.GetLogger("ETL"); + InputData = new List(); + OutputBuffer = new BufferBlock(); + } + + public BlockTransformation(Func, List> blockTransformationFunc) : this() { + BlockTransformationFunc = blockTransformationFunc; + } + + public BlockTransformation(string name, Func, List> blockTransformationFunc) : this(blockTransformationFunc) { + this.TaskName = name; + } + + private void WriteIntoOutput() { + foreach (TInput row in InputData) { + OutputBuffer.Post(row); + } + OutputBuffer.Complete(); + } + + public void LinkTo(IDataFlowLinkTarget target) { + OutputBuffer.LinkTo(target.TargetBlock, new DataflowLinkOptions() { PropagateCompletion = true }); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + public void LinkTo(IDataFlowLinkTarget target, Predicate predicate) { + OutputBuffer.LinkTo(target.TargetBlock, new DataflowLinkOptions() { PropagateCompletion = true }, predicate); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + } + + +} diff --git a/ETLBox/Toolbox/DataFlow/CSVSource.cs b/ETLBox/Toolbox/DataFlow/CSVSource.cs new file mode 100644 index 00000000..d8f0b525 --- /dev/null +++ b/ETLBox/Toolbox/DataFlow/CSVSource.cs @@ -0,0 +1,120 @@ +using CsvHelper; +using System; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using System.Threading.Tasks.Dataflow; + +namespace ALE.ETLBox { + public class CSVSource : GenericTask, ITask, IDataFlowSource { + /* ITask Interface */ + public override string TaskType { get; set; } = "DF_CSVSOURCE"; + public override string TaskName => $"Dataflow: Read CSV Source data from file: {FileName}"; + public override void Execute() => ExecuteAsync(); + + /* Public properties */ + public int SourceCommentRows { get; set; } = 0; + public bool TrimFields { get; set; } = true; + public bool TrimHeaders { get; set; } = true; + public string Delimiter { get; set; } = ","; + public char Quote { get; set; } = '"'; + public bool AllowComments { get; set; } = true; + public char Comment { get; set; } = '/'; + public bool SkipEmptyRecords { get; set; } = true; + public bool IgnoreBlankLines { get; set; } = true; + string FileName { get; set; } + public string[] FieldHeaders { get; private set; } + + public bool IsHeaderRead => FieldHeaders != null; + public ISourceBlock SourceBlock => this.Buffer; + + /* Private stuff */ + CsvReader CsvReader { get; set; } + StreamReader StreamReader { get; set; } + BufferBlock Buffer { get; set; } + NLog.Logger NLogger { get; set; } + + public CSVSource() { + NLogger = NLog.LogManager.GetLogger("ETL"); + } + + public CSVSource(string fileName) : this(){ + FileName = fileName; + Buffer = new BufferBlock(); + } + + public void ExecuteAsync() { + NLogStart(); + Open(); + ReadAll().Wait(); + Buffer.Complete(); + Close(); + NLogFinish(); + } + + private void Open() { + StreamReader = new StreamReader(FileName, Encoding.UTF8); + SkipSourceCommentRows(); + CsvReader = new CsvReader(StreamReader); + ConfigureCSVReader(); 
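+            // Note: FieldHeaders, CurrentRecord and the Trim*/SkipEmptyRecords configuration
+            // flags used here belong to an older CsvHelper API surface that was removed in
+            // later major versions, so the referenced CsvHelper package must match this API.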
+ } + private void SkipSourceCommentRows() { + for (int i = 0; i < SourceCommentRows; i++) + StreamReader.ReadLine(); + } + + private async Task ReadAll() { + bool headerRead = false; + while (CsvReader.Read()) { + if (!headerRead && CsvReader.FieldHeaders != null) { + FieldHeaders = CsvReader.FieldHeaders.Select(header => header.Trim()).ToArray(); + headerRead = true; + } + string[] line = new string[CsvReader.CurrentRecord.Length]; + for (int idx = 0; idx < CsvReader.CurrentRecord.Length; idx++) + line[idx] = CsvReader.GetField(idx); + await Buffer.SendAsync(line); + } + } + + private void ConfigureCSVReader() { + CsvReader.Configuration.Delimiter = Delimiter; + CsvReader.Configuration.Quote = Quote; + CsvReader.Configuration.AllowComments = AllowComments; + CsvReader.Configuration.Comment = Comment; + CsvReader.Configuration.SkipEmptyRecords = SkipEmptyRecords; + CsvReader.Configuration.IgnoreBlankLines = IgnoreBlankLines; + CsvReader.Configuration.TrimHeaders = TrimHeaders; + CsvReader.Configuration.TrimFields = TrimFields; + CsvReader.Configuration.Encoding = Encoding.UTF8; + } + + private void Close() { + CsvReader?.Dispose(); + CsvReader = null; + StreamReader?.Dispose(); + StreamReader = null; + } + + public void LinkTo(IDataFlowLinkTarget target) { + Buffer.LinkTo(target.TargetBlock, new DataflowLinkOptions() { PropagateCompletion = true }); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + public void LinkTo(IDataFlowLinkTarget target, Predicate predicate) { + Buffer.LinkTo(target.TargetBlock, new DataflowLinkOptions() { PropagateCompletion = true }, predicate); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + void NLogStart() { + if (!DisableLogging) + NLogger.Info(TaskName, TaskType, "START", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + void NLogFinish() { + if (!DisableLogging) + NLogger.Info(TaskName, TaskType, "END", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + } +} diff --git a/ETLBox/Toolbox/DataFlow/CustomDestination.cs b/ETLBox/Toolbox/DataFlow/CustomDestination.cs new file mode 100644 index 00000000..45bf4e65 --- /dev/null +++ b/ETLBox/Toolbox/DataFlow/CustomDestination.cs @@ -0,0 +1,42 @@ +using System; +using System.Threading.Tasks.Dataflow; + +namespace ALE.ETLBox { + public class CustomDestination : GenericTask, ITask, IDataFlowDestination { + + /* ITask Interface */ + public override string TaskType { get; set; } = "DF_CUSTOMDEST"; + public override string TaskName => $"Dataflow: Write Data into custom target (unnamed)"; + public override void Execute() { throw new Exception("Dataflow destinations can't be started directly"); } + + /* Public properties */ + public ITargetBlock TargetBlock => TargetActionBlock; + public Action WriteAction { + get { + return _writeAction; + } + set { + _writeAction = value; + TargetActionBlock = new ActionBlock(_writeAction); + + } + } + + /* Private stuff */ + private Action _writeAction; + + internal ActionBlock TargetActionBlock { get; set; } + + NLog.Logger NLogger { get; set; } + public CustomDestination() { + NLogger = NLog.LogManager.GetLogger("ETL"); + } + + public CustomDestination(Action writeAction) { + WriteAction = writeAction; + } + + public void Wait() => TargetActionBlock.Completion.Wait(); + } + +} diff --git 
a/ETLBox/Toolbox/DataFlow/CustomSource.cs b/ETLBox/Toolbox/DataFlow/CustomSource.cs new file mode 100644 index 00000000..4dd2362a --- /dev/null +++ b/ETLBox/Toolbox/DataFlow/CustomSource.cs @@ -0,0 +1,64 @@ +using System; +using System.Threading.Tasks.Dataflow; + +namespace ALE.ETLBox { + public class CustomSource : GenericTask, ITask, IDataFlowSource { + /* ITask Interface */ + public override string TaskType { get; set; } = "DF_CSVSOURCE"; + public override string TaskName => $"Read data from custom source"; + public override void Execute() => ExecuteAsync(); + + /* Public properties */ + public ISourceBlock SourceBlock => this.Buffer; + public Func ReadFunc { get; set; } + public Func ReadCompletedFunc { get; set; } + + /* Private stuff */ + BufferBlock Buffer { get; set; } + NLog.Logger NLogger { get; set; } + + public CustomSource() { + NLogger = NLog.LogManager.GetLogger("ETL"); + Buffer = new BufferBlock(); + } + + public CustomSource(Func readFunc, Func readCompletedFunc) : this() { + ReadFunc = readFunc; + ReadCompletedFunc = readCompletedFunc; + } + + public CustomSource(string name, Func readFunc, Func readCompletedFunc) : this(readFunc, readCompletedFunc) { + this.TaskName = name; + } + + public void ExecuteAsync() { + NLogStart(); + while (!ReadCompletedFunc.Invoke()) { + Buffer.Post(ReadFunc.Invoke()); + } + Buffer.Complete(); + NLogFinish(); + } + + + public void LinkTo(IDataFlowLinkTarget target) { + Buffer.LinkTo(target.TargetBlock, new DataflowLinkOptions() { PropagateCompletion = true }); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + public void LinkTo(IDataFlowLinkTarget target, Predicate predicate) { + Buffer.LinkTo(target.TargetBlock, new DataflowLinkOptions() { PropagateCompletion = true }, predicate); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + void NLogStart() { + if (!DisableLogging) + NLogger.Info(TaskName, TaskType, "START", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + void NLogFinish() { + if (!DisableLogging) + NLogger.Info(TaskName, TaskType, "END", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + } +} diff --git a/ETLBox/Toolbox/DataFlow/DBDestination.cs b/ETLBox/Toolbox/DataFlow/DBDestination.cs new file mode 100644 index 00000000..4fec5f89 --- /dev/null +++ b/ETLBox/Toolbox/DataFlow/DBDestination.cs @@ -0,0 +1,105 @@ +using System; +using System.Collections.Generic; +using System.Reflection; +using System.Threading.Tasks.Dataflow; + +namespace ALE.ETLBox { + public class DBDestination : GenericTask, ITask, IDataFlowDestination { + /* ITask Interface */ + public override string TaskType { get; set; } = "DF_DBDEST"; + public override string TaskName => $"Dataflow: Write Data batchwise into table {DestinationTableDefinition.Name}"; + public override void Execute() { throw new Exception("Dataflow destinations can't be started directly"); } + + /* Public properties */ + public TableDefinition DestinationTableDefinition { get; set; } + public Func BeforeBatchWrite { get; set; } + + public ITargetBlock TargetBlock => Buffer; + + /* Private stuff */ + int BatchSize { get; set; } = DEFAULT_BATCH_SIZE; + const int DEFAULT_BATCH_SIZE = 100000; + internal BatchBlock Buffer { get; set; } + internal ActionBlock TargetAction { get; set; } + NLog.Logger NLogger { get; set; } + 
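+        // Editor's note - an illustrative wiring sketch, not part of this changeset. A typical
+        // flow reads a CSV file, reshapes each row and writes it batchwise into a table; the
+        // file name, connection string variable and TableDefinition below are hypothetical:
+        //
+        //   ControlFlow.CurrentDbConnection = new SqlConnectionManager(targetConnectionString);
+        //   var source = new CSVSource("orders.csv");
+        //   var transform = new RowTransformation<string[], string[]>(row => row);
+        //   var destination = new DBDestination<string[]>(ordersTableDefinition, 1000);
+        //   source.LinkTo(transform);
+        //   transform.LinkTo(destination);
+        //   source.ExecuteAsync();
+        //   destination.Wait();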
TypeInfo TypeInfo { get; set; } + public DBDestination() { + InitObjects(DEFAULT_BATCH_SIZE); + + } + + public DBDestination(int batchSize) { + BatchSize = batchSize; + InitObjects(batchSize); + } + + public DBDestination(TableDefinition tableDefinition) { + DestinationTableDefinition = tableDefinition; + InitObjects(DEFAULT_BATCH_SIZE); + } + + public DBDestination(TableDefinition tableDefinition, int batchSize) { + DestinationTableDefinition = tableDefinition; + BatchSize = batchSize; + InitObjects(batchSize); + } + + public DBDestination(string name, TableDefinition tableDefinition, int batchSize) { + this.TaskName = name; + DestinationTableDefinition = tableDefinition; + BatchSize = batchSize; + InitObjects(batchSize); + } + + private void InitObjects(int batchSize) { + NLogger = NLog.LogManager.GetLogger("ETL"); + Buffer = new BatchBlock(batchSize); + TargetAction = new ActionBlock(d => WriteBatch(d)); + Buffer.LinkTo(TargetAction, new DataflowLinkOptions() { PropagateCompletion = true }); + TypeInfo = new TypeInfo(typeof(TInput)); + } + + private void WriteBatch(TInput[] data) { + NLogStart(); + if (BeforeBatchWrite != null) + data = BeforeBatchWrite.Invoke(data); + TableData td = new TableData(DestinationTableDefinition, DEFAULT_BATCH_SIZE); + td.Rows = ConvertRows(data); + new SqlTask(this, $"Execute Bulk insert into {DestinationTableDefinition.Name}").BulkInsert(td, td.ColumnMapping, DestinationTableDefinition.Name); + NLogFinish(); + } + + + private List ConvertRows(TInput[] data) { + List result = new List(); + foreach (var CurrentRow in data) { + object[] rowResult; + if (TypeInfo.IsArray) { + rowResult = CurrentRow as object[]; + } else { + rowResult = new object[TypeInfo.PropertyLength]; + int index = 0; + foreach (PropertyInfo propInfo in TypeInfo.PropertyInfos) { + rowResult[index] = propInfo.GetValue(CurrentRow); + index++; + } + } + result.Add(rowResult); + } + return result; + } + + public void Wait() => TargetAction.Completion.Wait(); + + void NLogStart() { + if (!DisableLogging) + NLogger.Debug(TaskName, TaskType, "START", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + void NLogFinish() { + if (!DisableLogging) + NLogger.Debug(TaskName, TaskType, "END", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + } + +} diff --git a/ETLBox/Toolbox/DataFlow/DBSource.cs b/ETLBox/Toolbox/DataFlow/DBSource.cs new file mode 100644 index 00000000..89f6c29d --- /dev/null +++ b/ETLBox/Toolbox/DataFlow/DBSource.cs @@ -0,0 +1,67 @@ +using System; +using System.Threading.Tasks.Dataflow; + +namespace ALE.ETLBox { + public class DBSource : GenericTask, ITask, IDataFlowSource where TOutput : new() { + /* ITask Interface */ + public override string TaskType { get; set; } = "DF_DBSOURCE"; + public override string TaskName => $"Dataflow: Read DB data from table: {SourceTableDefinition.Name}"; + public override void Execute() => ExecuteAsync(); + + /* Public Properties */ + public TableDefinition SourceTableDefinition { get; set; } + public ISourceBlock SourceBlock => this.Buffer; + + /* Private stuff */ + internal BufferBlock Buffer { get; set; } + NLog.Logger NLogger { get; set; } + + public DBSource() { + NLogger = NLog.LogManager.GetLogger("ETL"); + Buffer = new BufferBlock(); + } + + public DBSource(TableDefinition sourceTableDefinition) : this() { + SourceTableDefinition = sourceTableDefinition; + } + + public void ExecuteAsync() { + NLogStart(); + ReadAll(); + Buffer.Complete(); + NLogFinish(); + } + + public void 
ReadAll() { + new SqlTask() { + DisableLogging = true, + DisableExtension = true, + Sql = $"select {SourceTableDefinition.Columns.AsString()} from " + SourceTableDefinition.Name, + }.Query(row => Buffer.Post(row)); + } + + + public void LinkTo(IDataFlowLinkTarget target) { + Buffer.LinkTo(target.TargetBlock, new DataflowLinkOptions() { PropagateCompletion = true }); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + public void LinkTo(IDataFlowLinkTarget target, Predicate predicate) { + Buffer.LinkTo(target.TargetBlock, new DataflowLinkOptions() { PropagateCompletion = true }, predicate); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + void NLogStart() { + if (!DisableLogging) + NLogger.Info(TaskName, TaskType, "START", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + void NLogFinish() { + if (!DisableLogging) + NLogger.Info(TaskName, TaskType, "END", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + + } + +} diff --git a/ETLBox/Toolbox/DataFlow/Lookup.cs b/ETLBox/Toolbox/DataFlow/Lookup.cs new file mode 100644 index 00000000..281709e2 --- /dev/null +++ b/ETLBox/Toolbox/DataFlow/Lookup.cs @@ -0,0 +1,90 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks.Dataflow; + + +namespace ALE.ETLBox { + public class Lookup + : GenericTask, ITask, IDataFlowTransformation + where TSourceOutput : new() { + + /* ITask Interface */ + public override string TaskType { get; set; } = "DF_LOOKUP"; + public override string TaskName { get; set; } = "Lookup (unnamed)"; + public override void Execute() { throw new Exception("Transformations can't be executed directly"); } + + public List LookupList { get; set; } + ActionBlock LookupBuffer { get; set; } + + /* Public Properties */ + public ISourceBlock SourceBlock => RowTransformation.SourceBlock; + public ITargetBlock TargetBlock => RowTransformation.TargetBlock; + public IDataFlowSource Source { + get { + return _source; + } + set { + _source = value; + Source.SourceBlock.LinkTo(LookupBuffer, new DataflowLinkOptions() { PropagateCompletion = true }); + } + } + + /* Private stuff */ + RowTransformation RowTransformation { get; set; } + + private Func _rowTransformationFunc; + private IDataFlowSource _source; + + Func RowTransformationFunc { + get { + return _rowTransformationFunc; + } + set { + _rowTransformationFunc = value; + RowTransformation = new RowTransformation(_rowTransformationFunc); + RowTransformation.InitAction = LoadLookupData; + } + } + + NLog.Logger NLogger { get; set; } + public Lookup() { + NLogger = NLog.LogManager.GetLogger("ETL"); + LookupBuffer = new ActionBlock(row => FillBuffer(row)); + } + + public Lookup(Func rowTransformationFunc, IDataFlowSource source) : this() { + RowTransformationFunc = rowTransformationFunc; + Source = source; + } + + public Lookup(Func rowTransformationFunc, IDataFlowSource source, List lookupList) : this() { + RowTransformationFunc = rowTransformationFunc; + Source = source; + LookupList = lookupList; + } + + + private void LoadLookupData() { + Source.ExecuteAsync(); + LookupBuffer.Completion.Wait(); + } + + private void FillBuffer(TSourceOutput sourceRow) { + if (LookupList == null) LookupList = new List(); + LookupList.Add(sourceRow); + } + + public void LinkTo(IDataFlowLinkTarget target) { + 
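+            // Linking is delegated to the wrapped RowTransformation; the lookup data itself is
+            // loaded lazily by LoadLookupData when the first row passes through.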
RowTransformation.LinkTo(target); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + public void LinkTo(IDataFlowLinkTarget target, Predicate predicate) { + RowTransformation.LinkTo(target, predicate); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + + + } +} diff --git a/ETLBox/Toolbox/DataFlow/MergeJoin.cs b/ETLBox/Toolbox/DataFlow/MergeJoin.cs new file mode 100644 index 00000000..f69a1bb4 --- /dev/null +++ b/ETLBox/Toolbox/DataFlow/MergeJoin.cs @@ -0,0 +1,77 @@ +using System; +using System.Threading.Tasks.Dataflow; + + +namespace ALE.ETLBox { + public class MergeJoin : GenericTask, ITask, IDataFlowLinkSource { + private Func _mergeJoinFunc; + + /* ITask Interface */ + public override string TaskType { get; set; } = "DF_JOIN"; + public override string TaskName { get; set; } = "Join (unnamed)"; + public override void Execute() { throw new Exception("Transformations can't be executed directly"); } + + /* Public Properties */ + public MergeJoinTarget Target1 { get; set; } + public MergeJoinTarget Target2 { get; set; } + public ISourceBlock SourceBlock => Transformation.SourceBlock; + + public Func MergeJoinFunc { + get { return _mergeJoinFunc; } + set { + _mergeJoinFunc = value; + Transformation.RowTransformationFunc = new Func, TOutput>(tuple => _mergeJoinFunc.Invoke(tuple.Item1, tuple.Item2)); + JoinBlock.LinkTo(Transformation.TargetBlock, new DataflowLinkOptions { PropagateCompletion = true }); + } + } + + /* Private stuff */ + internal BufferBlock Buffer1 { get; set; } + internal BufferBlock Buffer2 { get; set; } + internal JoinBlock JoinBlock { get; set; } + internal RowTransformation, TOutput> Transformation { get; set; } + + NLog.Logger NLogger { get; set; } + + public MergeJoin() { + NLogger = NLog.LogManager.GetLogger("ETL"); + Transformation = new RowTransformation, TOutput>(); + JoinBlock = new JoinBlock(); + Target1 = new MergeJoinTarget(JoinBlock.Target1); + Target2 = new MergeJoinTarget(JoinBlock.Target2); + + + } + + public MergeJoin(Func mergeJoinFunc) : this() { + MergeJoinFunc = mergeJoinFunc; + } + + public MergeJoin(string name) : this() { + this.TaskName = name; + } + + public void LinkTo(IDataFlowLinkTarget target) { + Transformation.LinkTo(target); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + public void LinkTo(IDataFlowLinkTarget target, Predicate predicate) { + Transformation.LinkTo(target, predicate); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + } + + public class MergeJoinTarget : IDataFlowDestination{ + public ITargetBlock TargetBlock { get; set; } + + public void Wait() { + TargetBlock.Completion.Wait(); + } + public MergeJoinTarget(ITargetBlock joinTarget) { + TargetBlock = joinTarget; + } + } +} + diff --git a/ETLBox/Toolbox/DataFlow/Multicast.cs b/ETLBox/Toolbox/DataFlow/Multicast.cs new file mode 100644 index 00000000..dbb75509 --- /dev/null +++ b/ETLBox/Toolbox/DataFlow/Multicast.cs @@ -0,0 +1,52 @@ +using System; +using System.Reflection; +using System.Threading.Tasks.Dataflow; + + +namespace ALE.ETLBox { + public class Multicast : GenericTask, ITask, IDataFlowTransformation where TInput : new() { + /* ITask Interface */ + public 
override string TaskType { get; set; } = "DF_MULTICAST"; + public override string TaskName { get; set; } = "Multicast (unnamed)"; + public override void Execute() { throw new Exception("Transformations can't be executed directly"); } + + /* Public Properties */ + public ISourceBlock SourceBlock => BroadcastBlock; + public ITargetBlock TargetBlock => BroadcastBlock; + + /* Private stuff */ + internal BroadcastBlock BroadcastBlock { get; set; } + NLog.Logger NLogger { get; set; } + TypeInfo TypeInfo { get; set; } + public Multicast() { + NLogger = NLog.LogManager.GetLogger("ETL"); + TypeInfo = new TypeInfo(typeof(TInput)); + BroadcastBlock = new BroadcastBlock(Clone); + } + + public Multicast(string name) : this() { + this.TaskName = name; + } + + public void LinkTo(IDataFlowLinkTarget target) { + BroadcastBlock.LinkTo(target.TargetBlock, new DataflowLinkOptions() { PropagateCompletion = true }); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + public void LinkTo(IDataFlowLinkTarget target, Predicate predicate) { + BroadcastBlock.LinkTo(target.TargetBlock, new DataflowLinkOptions() { PropagateCompletion = true }, predicate); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + private TInput Clone(TInput row) { + TInput clone = default(TInput); + if (!TypeInfo.IsArray) { + clone = new TInput(); + foreach (PropertyInfo propInfo in TypeInfo.PropertyInfos) { + propInfo.SetValue(clone, propInfo.GetValue(row)); + } + } + return clone; + } + } +} diff --git a/ETLBox/Toolbox/DataFlow/RowTransformation.cs b/ETLBox/Toolbox/DataFlow/RowTransformation.cs new file mode 100644 index 00000000..62640550 --- /dev/null +++ b/ETLBox/Toolbox/DataFlow/RowTransformation.cs @@ -0,0 +1,71 @@ +using System; +using System.Threading.Tasks.Dataflow; + + +namespace ALE.ETLBox { + public class RowTransformation : GenericTask, ITask, IDataFlowTransformation { + /* ITask Interface */ + public override string TaskType { get; set; } = "DF_ROWTRANSFORMATION"; + public override string TaskName { get; set; } = "Row Transformation (unnamed)"; + public override void Execute() { throw new Exception("Transformations can't be executed directly"); } + + /* Public Properties */ + public Func RowTransformationFunc { + get { + return _rowTransformationFunc; + } + + set { + _rowTransformationFunc = value; + TransformBlock = new TransformBlock(row => InvokeRowTransformationFunc(row)); + } + } + public Action InitAction { get; set; } + public bool WasInitialized { get; private set; } = false; + + public ITargetBlock TargetBlock => TransformBlock; + public ISourceBlock SourceBlock => TransformBlock; + + /* Private stuff */ + Func _rowTransformationFunc; + internal TransformBlock TransformBlock { get; set; } + + NLog.Logger NLogger { get; set; } + public RowTransformation() { + NLogger = NLog.LogManager.GetLogger("ETL"); + } + + public RowTransformation(Func rowTransformationFunc) : this() { + RowTransformationFunc = rowTransformationFunc; + } + + public RowTransformation(string name, Func rowTransformationFunc) : this(rowTransformationFunc) { + this.TaskName = name; + } + + public RowTransformation(string name, Func rowTransformationFunc, Action initAction) : this(rowTransformationFunc) { + this.TaskName = name; + this.InitAction = initAction; + } + + public void LinkTo(IDataFlowLinkTarget target) { + 
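+            // PropagateCompletion lets completion (and faults) flow from this block to the
+            // linked target, so downstream destinations only need to Wait() on their block.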
TransformBlock.LinkTo(target.TargetBlock, new DataflowLinkOptions() { PropagateCompletion = true }); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + public void LinkTo(IDataFlowLinkTarget target, Predicate predicate) { + TransformBlock.LinkTo(target.TargetBlock, new DataflowLinkOptions() { PropagateCompletion = true }, predicate); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + + private TOutput InvokeRowTransformationFunc(TInput row) { + if (!WasInitialized) { + InitAction?.Invoke(); + WasInitialized = true; + NLogger.Debug(TaskName + " was initialized!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + return RowTransformationFunc.Invoke(row); + } + } +} diff --git a/ETLBox/Toolbox/DataFlow/Sort.cs b/ETLBox/Toolbox/DataFlow/Sort.cs new file mode 100644 index 00000000..09ea662b --- /dev/null +++ b/ETLBox/Toolbox/DataFlow/Sort.cs @@ -0,0 +1,63 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks.Dataflow; + + +namespace ALE.ETLBox { + public class Sort : GenericTask, ITask, IDataFlowLinkTarget, IDataFlowLinkSource { + + + /* ITask Interface */ + public override string TaskType { get; set; } = "DF_SORT"; + public override string TaskName { get; set; } = "Sort (unnamed)"; + public override void Execute() { throw new Exception("Transformations can't be executed directly"); } + + /* Public Properties */ + + public Comparison SortFunction { + get { return _sortFunction; } + set { + _sortFunction = value; + BlockTransformation = new BlockTransformation(SortByFunc); + } + } + + public ISourceBlock SourceBlock => BlockTransformation.SourceBlock; + public ITargetBlock TargetBlock => BlockTransformation.TargetBlock; + + /* Private stuff */ + Comparison _sortFunction; + BlockTransformation BlockTransformation { get; set; } + NLog.Logger NLogger { get; set; } + + public Sort() { + NLogger = NLog.LogManager.GetLogger("ETL"); + } + + public Sort(Comparison sortFunction) : this() { + SortFunction = sortFunction; + } + + public Sort(string name, Comparison sortFunction) : this(sortFunction) { + this.TaskName = name; + } + + List SortByFunc(List data) { + data.Sort(SortFunction); + return data; + } + + public void LinkTo(IDataFlowLinkTarget target) { + BlockTransformation.LinkTo(target); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + public void LinkTo(IDataFlowLinkTarget target, Predicate predicate) { + BlockTransformation.LinkTo(target, predicate); + NLogger.Debug(TaskName + " was linked to Target!", TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + } + + } + + +} diff --git a/ETLBox/Toolbox/Database/AddFileGroupTask.cs b/ETLBox/Toolbox/Database/AddFileGroupTask.cs new file mode 100644 index 00000000..2a1c4aa7 --- /dev/null +++ b/ETLBox/Toolbox/Database/AddFileGroupTask.cs @@ -0,0 +1,69 @@ +using System; + +namespace ALE.ETLBox { + public class AddFileGroupTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "CREATEFG"; + public override string TaskName => $"Create Filegroup {FileGroupName}"; + public override void Execute() => new SqlTask(this, Sql).ExecuteNonQuery(); + + /* Public properties */ + public 
string FileGroupName { get; set; } + public string DatabaseName { get; set; } + public string Size { get; set; } + public string Filegrowth { get; set; } + public bool IsDefaultFileGroup { get; set; } + public string Sql { + get { + return $@"--Add the filegroup + declare @sql nvarchar(4000) = N'select 1' + alter database [{DatabaseName}] add filegroup [{FileGroupName}] + + declare @path nvarchar(500) + select @path = substring([physical_name],0, len([physical_name]) - charindex('\', reverse([physical_name]))+1 ) + '\{FileGroupName}.ndf' + from sys.master_files + where name = '{DatabaseName}' + + set @sql = N' + alter database [{DatabaseName}] add file ( + name = N''{FileGroupName}'' + , filename = N''' + @path + ''' + , size = {Size} + , filegrowth = {Filegrowth} + ) + to filegroup [{FileGroupName}]' + exec sp_executesql @stmt=@sql + + set @sql = N'use [{DatabaseName}]' + exec sp_executesql @stmt=@sql + + {SetDefaultFileGroupSql} +"; + } + } + + public AddFileGroupTask() { } + + public AddFileGroupTask(string fileGroupName, string databaseName) : this() { + FileGroupName = fileGroupName; + DatabaseName = databaseName; + } + + public AddFileGroupTask(string name, string databaseName, string size, string filegrowth, bool isDefaultFileGroup) : this(name, databaseName) { + Size = size; + Filegrowth = filegrowth; + IsDefaultFileGroup = isDefaultFileGroup; + } + + public static void AddFileGroup(string fileGroupName, string databaseName) => new AddFileGroupTask(fileGroupName, databaseName).Execute(); + + public static void AddFileGroup(string fileGroupName, string databaseName, string size, string fileGrowth, bool isDefaultFileGroup) + => new AddFileGroupTask(fileGroupName, databaseName, size, fileGrowth, isDefaultFileGroup).Execute(); + + private string SetDefaultFileGroupSql => IsDefaultFileGroup ? 
+ $@"--if not exists (select name from sys.filegroups where is_default = 1 and name = N'{FileGroupName}') + alter database [{DatabaseName}] MODIFY FILEGROUP [{FileGroupName}] default" : String.Empty; + + + } +} diff --git a/ETLBox/Toolbox/Database/CRUDProcedureTask.cs b/ETLBox/Toolbox/Database/CRUDProcedureTask.cs new file mode 100644 index 00000000..7ad33941 --- /dev/null +++ b/ETLBox/Toolbox/Database/CRUDProcedureTask.cs @@ -0,0 +1,64 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace ALE.ETLBox { + public class CRUDProcedureTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "CRUDPROC"; + public override string TaskName => $"{CreateOrAlterSql} procedure {ProcedureName}"; + public override void Execute() { + IsExisting = new SqlTask(this, CheckIfExistsSql) { TaskName = $"Check if procedure {ProcedureName} exists", TaskHash = this.TaskHash }.ExecuteScalarAsBool(); + new SqlTask(this, Sql).ExecuteNonQuery(); + } + + /* Public properties */ + public string ProcedureName { get; set; } + public string ProcedureDefinition { get; set; } + public IList ProcedureParameters { get; set; } + public string Sql => $@"{CreateOrAlterSql} procedure {ProcedureName} +{ParameterDefinition} +as +begin +set nocount on + +{ProcedureDefinition} + +end --End of Procedure + "; + + public CRUDProcedureTask() { + + } + public CRUDProcedureTask(string procedureName, string procedureDefinition) : this() { + this.ProcedureName = procedureName; + this.ProcedureDefinition = procedureDefinition; + } + + public CRUDProcedureTask(string procedureName, string procedureDefinition, IList procedureParameter) : this(procedureName, procedureDefinition) { + this.ProcedureParameters = procedureParameter; + } + + public CRUDProcedureTask(ProcedureDefinition definition) : this() { + this.ProcedureName = definition.Name; + this.ProcedureDefinition = definition.Definition; + this.ProcedureParameters = definition.Parameter; + } + + public static void CreateOrAlter(string procedureName, string procedureDefinition) => new CRUDProcedureTask(procedureName, procedureDefinition).Execute(); + public static void CreateOrAlter(string procedureName, string procedureDefinition, IList procedureParameter) + => new CRUDProcedureTask(procedureName, procedureDefinition, procedureParameter).Execute(); + + public static void CreateOrAlter(ProcedureDefinition procedure) + => new CRUDProcedureTask(procedure).Execute(); + + string CheckIfExistsSql => $@"if exists (select * from sys.objects where type = 'P' and object_id = object_id('{ProcedureName}')) select 1; +else select 0;"; + bool IsExisting { get; set; } + string CreateOrAlterSql => IsExisting ? "Alter" : "Create"; + string ParameterDefinition => ProcedureParameters?.Count > 0 ? 
+ String.Join("," + Environment.NewLine, ProcedureParameters.Select(par => par.Sql)) + : String.Empty; + + } +} diff --git a/ETLBox/Toolbox/Database/CRUDViewTask.cs b/ETLBox/Toolbox/Database/CRUDViewTask.cs new file mode 100644 index 00000000..ef2d9bc0 --- /dev/null +++ b/ETLBox/Toolbox/Database/CRUDViewTask.cs @@ -0,0 +1,35 @@ +namespace ALE.ETLBox { + public class CRUDViewTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "CRUDVIEW"; + public override string TaskName => $"{CreateOrAlterSql} view {ViewName}"; + public override void Execute() { + IsExisting = new SqlTask(this, CheckIfExistsSql) { TaskName = $"Check if view {ViewName} exists", TaskHash = this.TaskHash }.ExecuteScalarAsBool(); + new SqlTask(this, Sql).ExecuteNonQuery(); + } + + /* Public properties */ + public string ViewName { get; set; } + public string Definition { get; set; } + public string Sql => $@"{CreateOrAlterSql} view {ViewName} +as +{Definition} +"; + + public CRUDViewTask() { + + } + public CRUDViewTask(string viewName, string definition) : this() { + this.ViewName = viewName; + this.Definition = definition; + } + + public static void CreateOrAlter(string viewName, string definition) => new CRUDViewTask(viewName, definition).Execute(); + + string CheckIfExistsSql => $@"if exists (select * from sys.objects where type = 'V' and object_id = object_id('{ViewName}')) select 1; +else select 0;"; + bool IsExisting { get; set; } + string CreateOrAlterSql => IsExisting ? "Alter" : "Create"; + + } +} diff --git a/ETLBox/Toolbox/Database/CalculateDatabaseHashTask.cs b/ETLBox/Toolbox/Database/CalculateDatabaseHashTask.cs new file mode 100644 index 00000000..a0fa986a --- /dev/null +++ b/ETLBox/Toolbox/Database/CalculateDatabaseHashTask.cs @@ -0,0 +1,61 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace ALE.ETLBox { + public class CalculateDatabaseHashTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "CALCDBHASH"; + public override string TaskName => $"Calculate hash value for schema(s) {SchemaNamesAsString}"; + public override void Execute() { + List allColumns = new List(); + new SqlTask(this, Sql) { + Actions = new List>() { + col => allColumns.Add((string)col) + } + } + .ExecuteReader(); + DatabaseHash = HashHelper.Encrypt_Char40(String.Join("|", allColumns)); + } + + /* Public properties */ + public List SchemaNames { get; set; } + + public string DatabaseHash { get; private set; } + + string SchemaNamesAsString => String.Join(",",SchemaNames.Select(name=>$"'{name}'")); + public string Sql => $@" +select sch.name + '.' 
+ tbls.name + N'|' + + cols.name + N'|' + + typ.name + N'|' + + cast(cols.max_length as nvarchar(20))+ N'|' + + cast(cols.precision as nvarchar(20)) + N'|' + + cast(cols.scale as nvarchar(20)) + N'|' + + cast(cols.is_nullable as nvarchar(3)) + N'|' + + cast(cols.is_identity as nvarchar(3))+ N'|' + + cast(cols.is_computed as nvarchar(3)) as FullColumnName +from sys.columns cols +inner join sys.tables tbls on cols.object_id = tbls.object_id +inner join sys.schemas sch on sch.schema_id = tbls.schema_id +inner join sys.types typ on typ.user_type_id = cols.user_type_id +where tbls.type = 'U' +and sch.name in ({SchemaNamesAsString}) +order by sch.name, tbls.name, cols.column_id +"; + + public CalculateDatabaseHashTask() { + + } + public CalculateDatabaseHashTask(List schemaNames) : this() { + this.SchemaNames = schemaNames; + } + public CalculateDatabaseHashTask Calculate() { + Execute(); + return this; + } + + public static string Calculate(List schemaNames) => new CalculateDatabaseHashTask(schemaNames).Calculate().DatabaseHash; + + + } +} diff --git a/ETLBox/Toolbox/Database/CleanUpSchemaTask.cs b/ETLBox/Toolbox/Database/CleanUpSchemaTask.cs new file mode 100644 index 00000000..463e2adc --- /dev/null +++ b/ETLBox/Toolbox/Database/CleanUpSchemaTask.cs @@ -0,0 +1,111 @@ +namespace ALE.ETLBox { + public class CleanUpSchemaTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "CLEANSCHEMA"; + public override string TaskName => $"Clean up schema {SchemaName}"; + public override void Execute() { + new SqlTask(this, Sql).ExecuteNonQuery(); + } + + + /* Public properties */ + public string SchemaName { get; set; } + public string Sql { + get { + return $@" + declare @SchemaName nvarchar(1000) = '{SchemaName}' + declare @SQL varchar(4000) + declare @msg varchar(500) + + if OBJECT_ID('tempdb..#dropcode') is not null drop table #dropcode + + create table #dropcode ( + ID int identity(1, 1) + ,SQLStatement varchar(1000) + ) + + -- removes all the foreign keys that reference a PK in the target schema + select @SQL = 'select + '' alter table [''+SCHEMA_NAME(fk.schema_id)+''].[''+OBJECT_NAME(fk.parent_object_id)+''] drop constraint ''+ fk.[Name] + from sys.foreign_keys fk + join sys.tables t on t.object_id = fk.referenced_object_id + where t.schema_id = schema_id(''' + @SchemaName + ''') + and fk.schema_id <> t.schema_id + order by fk.name desc' + + insert into #dropcode + exec (@SQL) + + -- drop all default constraints, check constraints and Foreign Keys + select @SQL = 'select + '' alter table [''+schema_name(t.schema_id)+''].[''+OBJECT_NAME(fk.parent_object_id)+''] drop constraint ''+ fk.[Name] + from sys.objects fk + join sys.tables t on t.object_id = fk.parent_object_id + where t.schema_id = schema_id(''' + @SchemaName + ''') + and fk.type IN (''D'', ''C'', ''F'')' + + insert into #dropcode + exec (@SQL) + + -- drop all other objects in order + select @SQL = 'SELECT + CASE WHEN SO.type=''PK'' THEN '' ALTER TABLE [''+SCHEMA_NAME(SO.schema_id)+''].[''+OBJECT_NAME(SO.parent_object_id)+''] DROP CONSTRAINT [''+ SO.name + '']'' + WHEN SO.type=''U'' THEN '' DROP TABLE [''+SCHEMA_NAME(SO.schema_id)+''].[''+ SO.[Name] + '']'' + WHEN SO.type=''V'' THEN '' DROP VIEW [''+SCHEMA_NAME(SO.schema_id)+''].[''+ SO.[Name] + '']'' + WHEN SO.type=''P'' THEN '' DROP PROCEDURE [''+SCHEMA_NAME(SO.schema_id)+''].[''+ SO.[Name] + '']'' + WHEN SO.type=''TR'' THEN '' DROP TRIGGER [''+SCHEMA_NAME(SO.schema_id)+''].[''+ SO.[Name] + '']'' + WHEN SO.type=''SO'' THEN '' DROP SEQUENCE 
[''+SCHEMA_NAME(SO.schema_id)+''].[''+ SO.[Name] + '']'' + WHEN SO.type IN (''FN'', ''TF'',''IF'',''FS'',''FT'') THEN '' DROP FUNCTION [''+SCHEMA_NAME(SO.schema_id)+''].[''+ SO.[Name] + '']'' + END + FROM sys.objects SO + WHERE SO.schema_id = schema_id(''' + @SchemaName + + ''') + AND SO.type IN (''PK'', ''FN'', ''TF'', ''TR'', ''V'', ''U'', ''P'',''SO'') + ORDER BY CASE WHEN type = ''PK'' THEN 1 + WHEN type = ''TR'' THEN 2 + WHEN type = ''V'' THEN 3 + WHEN type = ''U'' THEN 4 + WHEN type in (''FN'', ''TF'', ''P'',''IF'',''FS'',''FT'') THEN 5 + ELSE 6 + END' + + insert into #dropcode + exec (@SQL) + + declare @ID int,@statement varchar(1000) + declare statement_cursor cursor for select SQLStatement from #dropcode order by ID asc + open statement_cursor + fetch statement_cursor into @statement + + while (@@FETCH_STATUS = 0) + begin + --print (@statement) + exec (@statement) + fetch statement_cursor into @statement + end + + close statement_cursor + deallocate statement_cursor +"; + } + } + + /* Some constructors */ + public CleanUpSchemaTask() { + } + + public CleanUpSchemaTask(string schemaName) : this() { + SchemaName = schemaName; + } + + + /* Static methods for convenience */ + public static void CleanUp(string schemaName) => new CleanUpSchemaTask(schemaName).Execute(); + + /* Implementation & stuff */ + + + } + + +} diff --git a/ETLBox/Toolbox/Database/CreateDatabaseTask.cs b/ETLBox/Toolbox/Database/CreateDatabaseTask.cs new file mode 100644 index 00000000..44010f50 --- /dev/null +++ b/ETLBox/Toolbox/Database/CreateDatabaseTask.cs @@ -0,0 +1,100 @@ +using System; + +namespace ALE.ETLBox { + public class CreateDatabaseTask : GenericTask, ITask + { + /* ITask Interface */ + public override string TaskType { get; set; } = "CREATEDB"; + public override string TaskName => $"Create DB {DatabaseName}"; + public override void Execute() + { + new SqlTask(this, Sql).ExecuteNonQuery(); + + } + + + /* Public properties */ + public string DatabaseName { get; set; } + public RecoveryModel RecoveryModel { get; set; } = RecoveryModel.Simple; + public string Collation { get; set; } + public string Sql + { + get + { + return + $@" +if (db_id('{DatabaseName}') is null) +begin + use [master] + --Create Database + create database [{DatabaseName}] {CollationString} + {RecoveryString} + alter database [{DatabaseName}] set auto_create_statistics on + alter database [{DatabaseName}] set auto_update_statistics on + alter database [{DatabaseName}] set auto_update_statistics_async off + alter database [{DatabaseName}] set auto_close off + alter database [{DatabaseName}] set auto_shrink off + + --wait for database to enter 'ready' state + declare @dbReady bit = 0 + while (@dbReady = 0) + begin + select @dbReady = case when DATABASEPROPERTYEX('{DatabaseName}', 'Collation') is null then 0 else 1 end + end +end +"; + } + } + + /* Some constructors */ + public CreateDatabaseTask() { + } + + public CreateDatabaseTask(string databaseName) : this() + { + DatabaseName = databaseName; + } + + public CreateDatabaseTask(string databaseName, RecoveryModel recoveryModel) : this(databaseName) + { + RecoveryModel = recoveryModel; + } + + public CreateDatabaseTask(string databaseName, RecoveryModel recoveryModel, string collation) : this(databaseName, recoveryModel) + { + Collation = collation; + } + + /* Static methods for convenience */ + public static void Create(string databaseName) => new CreateDatabaseTask(databaseName).Execute(); + public static void Create(string databaseName, RecoveryModel recoveryModel) => new 
CreateDatabaseTask(databaseName,recoveryModel).Execute(); + public static void Create(string databaseName, RecoveryModel recoveryModel, string collation) => new CreateDatabaseTask(databaseName, recoveryModel,collation).Execute(); + + /* Implementation & stuff */ + string RecoveryModelAsString + { + get + { + if (RecoveryModel == RecoveryModel.Simple) + return "simple"; + else if (RecoveryModel == RecoveryModel.BulkLogged) + return "bulk"; + else if (RecoveryModel == RecoveryModel.Full) + return "full"; + else return string.Empty; + } + } + bool HasCollation => !String.IsNullOrWhiteSpace(Collation); + string CollationString => HasCollation ? "collate " + Collation : string.Empty; + string RecoveryString => RecoveryModel != RecoveryModel.Default ? + $"alter database [{DatabaseName}] set recovery {RecoveryModelAsString} with no_wait" + : string.Empty; + + } + + public enum RecoveryModel + { + Default, Simple, BulkLogged, Full + } + +} diff --git a/ETLBox/Toolbox/Database/CreateIndexTask.cs b/ETLBox/Toolbox/Database/CreateIndexTask.cs new file mode 100644 index 00000000..04795a84 --- /dev/null +++ b/ETLBox/Toolbox/Database/CreateIndexTask.cs @@ -0,0 +1,47 @@ +using System; +using System.Collections.Generic; + +namespace ALE.ETLBox { + public class CreateIndexTask: GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "CREATEINDEX"; + public override string TaskName => $"Create index {IndexName} on table {TableName}"; + public override void Execute() => new SqlTask(this, Sql).ExecuteNonQuery(); + + /* Public properties */ + public string IndexName { get; set; } + public string TableName { get; set; } + public IList IndexColumns { get; set; } + public IList IncludeColumns { get; set; } + public bool IsUnique { get; set; } + public bool IsClustered { get; set; } + public string Sql => $@" +if not exists (select * from sys.indexes where name='{IndexName}' and object_id = object_id('{TableName}')) + create {UniqueSql} {ClusteredSql} index {IndexName} on {TableName} + ( {String.Join(",", IndexColumns)} ) + {IncludeSql} + with(PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, SORT_IN_TEMPDB = ON, DROP_EXISTING = OFF, ONLINE = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) +"; + + public CreateIndexTask() { + + } + public CreateIndexTask(string indexName, string tableName, IList indexColumns) : this() { + this.IndexName = indexName; + this.TableName = tableName; + this.IndexColumns = indexColumns; + } + + public CreateIndexTask(string indexName, string tableName, IList indexColumns, IList includeColumns) : this(indexName, tableName, indexColumns) { + this.IncludeColumns = includeColumns; + } + + public static void Create(string indexName, string tableName, IList indexColumns) => new CreateIndexTask(indexName,tableName,indexColumns).Execute(); + public static void Create(string indexName, string tableName, IList indexColumns, IList includeColumns) => new CreateIndexTask(indexName, tableName, indexColumns, includeColumns).Execute(); + + string UniqueSql => IsUnique ? "unique" : string.Empty; + string ClusteredSql => IsClustered ? "clustered" : "nonclustered"; + string IncludeSql => IncludeColumns?.Count > 0 ? 
$"include ({String.Join(" ,", IncludeColumns)})" : string.Empty; + + } +} diff --git a/ETLBox/Toolbox/Database/CreateSchemaTask.cs b/ETLBox/Toolbox/Database/CreateSchemaTask.cs new file mode 100644 index 00000000..2fa533f2 --- /dev/null +++ b/ETLBox/Toolbox/Database/CreateSchemaTask.cs @@ -0,0 +1,26 @@ +namespace ALE.ETLBox { + public class CreateSchemaTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "CREATESCHEMA"; + public override string TaskName => $"Create schema {SchemaName}"; + public override void Execute() => new SqlTask(this, Sql).ExecuteNonQuery(); + + /* Public properties */ + public string SchemaName { get; set; } + public string Sql => $@"if not exists (select schema_name(schema_id) from sys.schemas where schema_name(schema_id) = '{SchemaName}') +begin + exec sp_executesql N'create schema [{SchemaName}]' +end"; + + public CreateSchemaTask() { + + } + public CreateSchemaTask(string schemaName) : this() { + this.SchemaName = schemaName; + } + + public static void Create(string schemaName) => new CreateSchemaTask(schemaName).Execute(); + + + } +} diff --git a/ETLBox/Toolbox/Database/CreateTableTask.cs b/ETLBox/Toolbox/Database/CreateTableTask.cs new file mode 100644 index 00000000..8c1362fe --- /dev/null +++ b/ETLBox/Toolbox/Database/CreateTableTask.cs @@ -0,0 +1,72 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.RegularExpressions; + +namespace ALE.ETLBox { + public class CreateTableTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "CREATETABLE"; + public override string TaskName => $"Create table {TableName}"; + public override void Execute() => new SqlTask(this, Sql).ExecuteNonQuery(); + + /* Public properties */ + public string TableName { get; set; } + public string TableWithoutSchema => TableName.IndexOf('.') > 0 ? TableName.Substring(TableName.LastIndexOf('.') + 1) : TableName; + public IList Columns { get; set; } + + public bool OnlyNVarCharColumns { get; set; } + public string Sql => $@" +if object_id('{TableName}', 'U') is null + create table {TableName} ( + {ColumnsDefinitionSql} + ) +"; + + public CreateTableTask() { + + } + public CreateTableTask(string tableName, IList columns) : this() { + this.TableName = tableName; + this.Columns = columns; + } + + public CreateTableTask(TableDefinition tableDefinition) : this() { + this.TableName = tableDefinition.Name; + this.Columns = tableDefinition.Columns.Cast().ToList(); + } + + public static void Create(string tableName, IList columns) => new CreateTableTask(tableName, columns).Execute(); + public static void Create(string tableName, List columns) => new CreateTableTask(tableName, columns.Cast().ToList()).Execute(); + public static void Create(TableDefinition tableDefinition) => new CreateTableTask(tableDefinition).Execute(); + string ColumnsDefinitionSql => String.Join(" , " + Environment.NewLine, Columns?.Select(col => CreateTableDefinition(col))); + + string CreateTableDefinition(ITableColumn col) { + + string dataType = string.Empty; + if (String.IsNullOrWhiteSpace(col.ComputedColumn)) + dataType = OnlyNVarCharColumns ? $"nvarchar({DataTypeConverter.GetTypeLength(col.DataType)})" : col.DataType; + string identitySql = col.IsIdentity ? $"identity({col.IdentitySeed ?? 1},{col.IdentityIncrement ?? 1})" : string.Empty; + string collationSql = !String.IsNullOrWhiteSpace(col.Collation) ? 
$"collate {col.Collation}" : string.Empty; + string nullSql = string.Empty; + if (String.IsNullOrWhiteSpace(col.ComputedColumn)) + nullSql = col.AllowNulls ? "NULL" : "NOT NULL"; + string primarySql = col.IsPrimaryKey ? $"constraint [pk_{TableWithoutSchema}_{col.Name}] primary key clustered ( [{col.Name}] asc )" : string.Empty; + string defaultSql = string.Empty; + if (!col.IsPrimaryKey) + defaultSql = col.DefaultValue != null ? DefaultConstraintName(col.DefaultConstraintName) + $" default {SetQuotesIfString(col.DefaultValue)}" : string.Empty; + string computedColumnSql = !String.IsNullOrWhiteSpace(col.ComputedColumn) ? $"as {col.ComputedColumn}" : string.Empty; + return $@"[{col.Name}] {dataType} {identitySql} {collationSql} {nullSql} {primarySql} {defaultSql} {computedColumnSql}"; + } + + string DefaultConstraintName(string defConstrName) => !String.IsNullOrWhiteSpace(defConstrName) ? $"constraint {defConstrName}" : string.Empty; + + string SetQuotesIfString(string value) { + if (!Regex.IsMatch(value, @"^\d+(\.\d+|)$"))//@" ^ (\d|\.)+$")) + return $"'{value}'"; + else + return value; + + } + } +} diff --git a/ETLBox/Toolbox/Database/DropDatabaseTask.cs b/ETLBox/Toolbox/Database/DropDatabaseTask.cs new file mode 100644 index 00000000..9a2a9e82 --- /dev/null +++ b/ETLBox/Toolbox/Database/DropDatabaseTask.cs @@ -0,0 +1,53 @@ +namespace ALE.ETLBox { + public class DropDatabaseTask : GenericTask, ITask + { + /* ITask Interface */ + public override string TaskType { get; set; } = "DROPDB"; + public override string TaskName => $"Drop DB {DatabaseName}"; + public override void Execute() + { + new SqlTask(this, Sql).ExecuteNonQuery(); + } + + /* Public properties */ + public string DatabaseName { get; set; } + public string Sql + { + get + { + return + $@" +if (db_id('{DatabaseName}') is not null) +begin + use [master] + --Delete Database + alter database [{DatabaseName}] + set single_user with rollback immediate + alter database [{DatabaseName}] + set multi_user + drop database [{DatabaseName}] +end +"; + } + } + + /* Some constructors */ + public DropDatabaseTask() { + } + + public DropDatabaseTask(string databaseName) : this() + { + DatabaseName = databaseName; + } + + + /* Static methods for convenience */ + public static void Delete(string databaseName) => new DropDatabaseTask(databaseName).Execute(); + + /* Implementation & stuff */ + + + } + + +} diff --git a/ETLBox/Toolbox/Database/DropTableTask.cs b/ETLBox/Toolbox/Database/DropTableTask.cs new file mode 100644 index 00000000..30813ffd --- /dev/null +++ b/ETLBox/Toolbox/Database/DropTableTask.cs @@ -0,0 +1,43 @@ +namespace ALE.ETLBox { + public class DropTableTask : GenericTask, ITask + { + /* ITask Interface */ + public override string TaskType { get; set; } = "DROPTABLE"; + public override string TaskName => $"Drop Table {TableName}"; + public override void Execute() + { + new SqlTask(this, Sql).ExecuteNonQuery(); + } + + /* Public properties */ + public string TableName { get; set; } + public string Sql + { + get + { + return + $@" +if object_id('{TableName}', 'U') is not null + drop table {TableName} +"; + } + } + + /* Some constructors */ + public DropTableTask() { + } + + public DropTableTask(string tableName) : this() + { + TableName = tableName; + } + + + /* Static methods for convenience */ + public static void Drop(string tableName) => new DropTableTask(tableName).Execute(); + + + } + + +} diff --git a/ETLBox/Toolbox/Database/GetDatabaseListTask.cs b/ETLBox/Toolbox/Database/GetDatabaseListTask.cs new file mode 100644 index 
00000000..5f647949 --- /dev/null +++ b/ETLBox/Toolbox/Database/GetDatabaseListTask.cs @@ -0,0 +1,38 @@ +using System; +using System.Collections.Generic; + +namespace ALE.ETLBox { + public class GetDatabaseListTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "GETDBLIST"; + public override string TaskName => $"Get names of all databases"; + public override void Execute() { + DatabaseNames = new List(); + new SqlTask(this, Sql) { + Actions = new List>() { + n => DatabaseNames.Add((string)n) + } + }.ExecuteReader(); + } + + + public List DatabaseNames { get; set; } + public string Sql { + get { + return $"SELECT [name] FROM master.dbo.sysdatabases WHERE dbid > 4"; + } + } + + public GetDatabaseListTask() { + + } + + public GetDatabaseListTask GetList() { + Execute(); + return this; + } + + public static List List() => new GetDatabaseListTask().GetList().DatabaseNames; + + } +} diff --git a/ETLBox/Toolbox/Database/RestoreDatabaseTask.cs b/ETLBox/Toolbox/Database/RestoreDatabaseTask.cs new file mode 100644 index 00000000..136b2b79 --- /dev/null +++ b/ETLBox/Toolbox/Database/RestoreDatabaseTask.cs @@ -0,0 +1,92 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; + +namespace ALE.ETLBox { + public class RestoreDatabaseTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "RESTOREDB"; + public override string TaskName => $"Restore DB {DatabaseName} from {Path.GetFullPath(FileName)}"; + + + public override void Execute() { + DefaultDataPath = (string)new SqlTask(this, DefaultDataPathSql) { TaskName = $"Read default data path" }.ExecuteScalar(); + FileList = new List(); + new SqlTask(this, FileListSql) { + TaskName = $"Read file list in backup file {Path.GetFullPath(FileName)}", + BeforeRowReadAction = () => CurrentBackupFile = new BackupFile(), + AfterRowReadAction = () => FileList.Add(CurrentBackupFile), + Actions = new List>() { + logicalName => CurrentBackupFile.LogicalName = (string)logicalName, + physicalName => CurrentBackupFile.PhysicalName = (string)physicalName, + type => CurrentBackupFile.Type = (string)type, + filegroupname => CurrentBackupFile.FileGroupName = (string)filegroupname, + size => { }, + MaxSize => { }, + fileid => CurrentBackupFile.FileID = (long)fileid, + } + }.ExecuteReader(); + new SqlTask(this, Sql).ExecuteNonQuery(); + } + + /* Public properties */ + public string DatabaseName { get; set; } + public string FileName { get; set; } + public string Sql { + get { + return + $@" +use [master] +restore database [{DatabaseName}] from disk = N'{Path.GetFullPath(FileName)}' with file=1, +" + +String.Join("," + Environment.NewLine, FileList.OrderBy(file => file.FileID) +.Select(file => $"move N'{file.LogicalName}' to N'{Path.Combine(DefaultDataPath, DatabaseName + file.Suffix)}'")) ++ $@" +, NOUNLOAD, REPLACE"; + + } + } + + /* Some constructors */ + public RestoreDatabaseTask() { + } + + public RestoreDatabaseTask(string databaseName, string fileName) : this() { + DatabaseName = databaseName; + FileName = fileName; + } + + + /* Static methods for convenience */ + public static void Restore(string databaseName, string fileName) => new RestoreDatabaseTask(databaseName, fileName).Execute(); + + /* Implementation & stuff */ + string DefaultDataPathSql => "select cast(serverproperty('InstanceDefaultDataPath') as nvarchar(1000)) as DefaultDataPath"; + string FileListSql => $@"use [master] +restore filelistonly from 
disk=N'{Path.GetFullPath(FileName)}'"; + List FileList { get; set; } + + internal class BackupFile { + internal string LogicalName { get; set; } + internal string PhysicalName { get; set; } + internal long FileID { get; set; } + internal string FileGroupName { get; set; } + internal string Type { get; set; } + internal string Suffix { + get { + if (Type == "D") + return FileID > 1 ? $"_{FileID}.ndf" : ".mdf"; + else + return FileID > 1 ? $"_{FileID}.log" : ".log"; + } + } + } + + BackupFile CurrentBackupFile { get; set; } + string DefaultDataPath { get; set; } + + } + + +} diff --git a/ETLBox/Toolbox/Database/RowCountTask.cs b/ETLBox/Toolbox/Database/RowCountTask.cs new file mode 100644 index 00000000..c39d274a --- /dev/null +++ b/ETLBox/Toolbox/Database/RowCountTask.cs @@ -0,0 +1,71 @@ +using System; + + +namespace ALE.ETLBox { + public class RowCountTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "ROWCOUNT"; + public override string TaskName => $"Count Rows for {TableName}" + (HasCondition ? $" with condition {Condition}" : ""); + public override void Execute() { + Rows = new SqlTask(this, Sql).ExecuteScalar(); + } + + public string TableName { get; set; } + public string Condition { get; set; } + public bool HasCondition => !String.IsNullOrWhiteSpace(Condition); + public int? Rows { get; private set; } + public bool? HasRows => Rows > 0; + public bool QuickQueryMode { get; set; } + public bool NoLock { get; set; } + public string Sql { + get { + return QuickQueryMode && !HasCondition ? $@"select cast(sum([rows]) as int) from sys.partitions where [object_id] = object_id(N'{TableName}') and index_id in (0,1)" : + $"select count(*) from {TableName} {WhereClause} {Condition} {NoLockHint}"; + } + } + + public RowCountTask() { + + } + public RowCountTask(string tableName) { + this.TableName = tableName; + } + + public RowCountTask(string tableName, RowCountOptions options) : this(tableName) { + if (options == RowCountOptions.QuickQueryMode) + QuickQueryMode = true; + if (options == RowCountOptions.NoLock) + NoLock = true; + + } + + public RowCountTask(string tableName, string condition) : this(tableName) { + this.Condition = condition; + } + + public RowCountTask(string tableName, string condition, RowCountOptions options) : this(tableName,options) { + this.Condition = condition; + } + + public RowCountTask Count() { + Execute(); + return this; + } + + public static int? Count(string tableName) => new RowCountTask(tableName).Count().Rows; + public static int? Count(string tableName, RowCountOptions options) => new RowCountTask(tableName,options).Count().Rows; + public static int? Count(string tableName, string condition) => new RowCountTask(tableName, condition).Count().Rows; + public static int? Count(string tableName, string condition, RowCountOptions options) => new RowCountTask(tableName, condition, options).Count().Rows; + + + string WhereClause => HasCondition ? "where" : String.Empty; + string NoLockHint => NoLock ? 
"with (nolock)" : String.Empty; + + } + + public enum RowCountOptions { + None, + QuickQueryMode, + NoLock + } +} diff --git a/ETLBox/Toolbox/Database/SqlTask.cs b/ETLBox/Toolbox/Database/SqlTask.cs new file mode 100644 index 00000000..2ccc5835 --- /dev/null +++ b/ETLBox/Toolbox/Database/SqlTask.cs @@ -0,0 +1,45 @@ +using System; +using System.Data; + +namespace ALE.ETLBox { + public class SqlTask : DbTask { + public override string TaskType { get; set; } = "SQL"; + public override string TaskName { get; set; } = "Run some sql"; + public override void Execute() => ExecuteNonQuery(); + public SqlTask() { + } + + public SqlTask(string name) : base(name) { + } + + public SqlTask(string name, FileConnectionManager fileConnection) : base(name, fileConnection) { + } + + public SqlTask(ITask callingTask, string sql) : base(callingTask, sql) { + } + + public SqlTask(string name, string sql) : base(name, sql) { + } + + public SqlTask(string name, string sql, params Action[] actions) : base(name, sql, actions) { + } + + public SqlTask(string name, string sql, Action beforeRowReadAction, Action afterRowReadAction, params Action[] actions) : base(name, sql, beforeRowReadAction, afterRowReadAction, actions) { + } + + /* Static methods for convenience */ + public static int ExecuteNonQuery(string name, string sql) => new SqlTask(name, sql).ExecuteNonQuery(); + public static int ExecuteNonQuery(string name, FileConnectionManager fileConnection) => new SqlTask(name, fileConnection).ExecuteNonQuery(); + public static object ExecuteScalar(string name, string sql) => new SqlTask(name, sql).ExecuteScalar(); + public static Nullable ExecuteScalar(string name, string sql) where T : struct => new SqlTask(name, sql).ExecuteScalar(); + public static bool ExecuteScalarAsBool(string name, string sql) => new SqlTask(name, sql).ExecuteScalarAsBool(); + public static void ExecuteReaderSingleLine(string name, string sql, params Action[] actions) => + new SqlTask(name, sql, actions) { ReadTopX = 1 }.ExecuteReader(); + public static void ExecuteReader(string name, string sql, params Action[] actions) => new SqlTask(name, sql, actions).ExecuteReader(); + public static void ExecuteReader(string name, string sql, Action beforeRowReadAction, Action afterRowReadAction, params Action[] actions) => + new SqlTask(name, sql, beforeRowReadAction, afterRowReadAction, actions).ExecuteReader(); + public static void BulkInsert(string name, IDataReader data, IColumnMappingCollection columnMapping, string tableName) => + new SqlTask(name).BulkInsert(data, columnMapping, tableName); + } + +} diff --git a/ETLBox/Toolbox/Database/TruncateTableTask.cs b/ETLBox/Toolbox/Database/TruncateTableTask.cs new file mode 100644 index 00000000..6a1ca5b1 --- /dev/null +++ b/ETLBox/Toolbox/Database/TruncateTableTask.cs @@ -0,0 +1,23 @@ +namespace ALE.ETLBox { + public class TruncateTableTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "TRUNCATE"; + public override string TaskName => $"Truncate table {TableName}"; + public override void Execute() => new SqlTask(this, Sql) .ExecuteNonQuery(); + + /* Public properties */ + public string TableName { get; set; } + public string Sql => $@"truncate table {TableName}"; + + public TruncateTableTask() { + + } + public TruncateTableTask(string tableName) : this() { + this.TableName = tableName; + } + + public static void Truncate(string tableName) => new TruncateTableTask(tableName).Execute(); + + + } +} diff --git a/ETLBox/Toolbox/Logging/AbortLoadProcessTask.cs 
b/ETLBox/Toolbox/Logging/AbortLoadProcessTask.cs new file mode 100644 index 00000000..b48bafbd --- /dev/null +++ b/ETLBox/Toolbox/Logging/AbortLoadProcessTask.cs @@ -0,0 +1,52 @@ +namespace ALE.ETLBox { + public class AbortLoadProcessTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "LOADPROCESS_ABORT"; + public override string TaskName => $"Abort process with key {LoadProcessKey}"; + public override void Execute() { + new SqlTask(this, Sql) { DisableLogging = true }.ExecuteNonQuery(); + var rlp = new ReadLoadProcessTableTask(LoadProcessKey) { TaskType = this.TaskType, TaskHash = this.TaskHash, DisableLogging = true }; + rlp.Execute(); + ControlFlow.CurrentLoadProcess = rlp.LoadProcess; + } + + /* Public properties */ + public int? _loadProcessKey; + public int? LoadProcessKey { + get { + return _loadProcessKey ?? ControlFlow.CurrentLoadProcess?.LoadProcessKey; + } + set { + _loadProcessKey = value; + } + } + public string AbortMessage { get; set; } + + + public string Sql => $@"EXECUTE etl.AbortLoadProcess + @LoadProcessKey = '{LoadProcessKey ?? ControlFlow.CurrentLoadProcess.LoadProcessKey}', + @AbortMessage = {AbortMessage.NullOrSqlString()}"; + + public AbortLoadProcessTask() { + + } + + public AbortLoadProcessTask(int? loadProcessKey) : this() { + this.LoadProcessKey = loadProcessKey; + } + public AbortLoadProcessTask(int? loadProcessKey, string abortMessage) : this(loadProcessKey) { + this.AbortMessage = abortMessage; + } + + public AbortLoadProcessTask(string abortMessage) : this() { + this.AbortMessage = abortMessage; + } + + public static void Abort() => new AbortLoadProcessTask().Execute(); + public static void Abort(int? loadProcessKey) => new AbortLoadProcessTask(loadProcessKey).Execute(); + public static void Abort(string abortMessage) => new AbortLoadProcessTask(abortMessage).Execute(); + public static void Abort(int? 
loadProcessKey, string abortMessage) => new AbortLoadProcessTask(loadProcessKey, abortMessage).Execute(); + + + } +} diff --git a/ETLBox/Toolbox/Logging/CleanUpLogTask.cs b/ETLBox/Toolbox/Logging/CleanUpLogTask.cs new file mode 100644 index 00000000..1d20c24f --- /dev/null +++ b/ETLBox/Toolbox/Logging/CleanUpLogTask.cs @@ -0,0 +1,29 @@ +namespace ALE.ETLBox { + public class CleanUpLogTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "CLEANUPLOG"; + public override string TaskName => $"Clean up log tables"; + public override void Execute() { + new SqlTask(this, Sql) { DisableLogging = true, DisableExtension = true }.ExecuteNonQuery(); + } + + public int DaysToKeep { get; set; } + + /* Public properties */ + public string Sql => $@" +delete from etl.Log + where LogDate < Dateadd(day,-{DaysToKeep},GETDATE()) +"; + + public CleanUpLogTask() { } + + public CleanUpLogTask(int daysToKeep) : this() { + DaysToKeep = daysToKeep; + } + public static void CleanUp(int daysToKeep) => new CleanUpLogTask(daysToKeep).Execute(); + + + + + } +} diff --git a/ETLBox/Toolbox/Logging/CreateLogTablesTask.cs b/ETLBox/Toolbox/Logging/CreateLogTablesTask.cs new file mode 100644 index 00000000..f80884db --- /dev/null +++ b/ETLBox/Toolbox/Logging/CreateLogTablesTask.cs @@ -0,0 +1,156 @@ +using System; +using System.Collections.Generic; + +namespace ALE.ETLBox { + public class CreateLogTablesTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "CREATELOG"; + public override string TaskName => $"Create log tables"; + public override void Execute() { + ExecuteTasks(); + } + + public CreateLogTablesTask() { + CreateETLSchema(); + CreateETLLogTable(); + CreateLoadProcessTable(); + CreateStartProcessProcedure(); + CreateTransferCompletedProcedure(); + CreateEndProcessProcedure(); + CreateAbortProcessProcedure(); + } + + private void CreateETLSchema() { + EtlSchema = new CreateSchemaTask("etl") { DisableLogging = true }; + } + + private void CreateETLLogTable() { + List columns = new List() { + new TableColumn("LogKey","int", allowNulls: false, isPrimaryKey: true, isIdentity:true), + new TableColumn("LogDate","datetime", allowNulls: false), + new TableColumn("Level","nvarchar(10)", allowNulls: true), + new TableColumn("Stage","nvarchar(20)", allowNulls: true), + new TableColumn("Message","nvarchar(4000)", allowNulls: true), + new TableColumn("TaskType","nvarchar(40)", allowNulls: true), + new TableColumn("TaskAction","nvarchar(5)", allowNulls: true), + new TableColumn("TaskHash","char(40)", allowNulls: true), + new TableColumn("Source","nvarchar(20)", allowNulls: true), + new TableColumn("LoadProcessKey","int", allowNulls: true) + }; + LogTable = new CreateTableTask("etl.Log", columns) { DisableLogging = true }; + } + + private void CreateLoadProcessTable() { + List lpColumns = new List() { + new TableColumn("LoadProcessKey","int", allowNulls: false, isPrimaryKey: true, isIdentity:true), + new TableColumn("StartDate","datetime", allowNulls: false), + new TableColumn("TransferCompletedDate","datetime", allowNulls: true), + new TableColumn("EndDate","datetime", allowNulls: true), + new TableColumn("Source","nvarchar(20)", allowNulls: true), + new TableColumn("ProcessName","nvarchar(100)", allowNulls: false) { DefaultValue = "N/A" }, + new TableColumn("StartMessage","nvarchar(4000)", allowNulls: true) , + new TableColumn("IsRunning","bit", allowNulls: false) { DefaultValue = "1" }, + new 
TableColumn("EndMessage","nvarchar(4000)", allowNulls: true) , + new TableColumn("WasSuccessful","bit", allowNulls: false) { DefaultValue = "0" }, + new TableColumn("AbortMessage","nvarchar(4000)", allowNulls: true) , + new TableColumn("WasAborted","bit", allowNulls: false) { DefaultValue = "0" }, + new TableColumn() { Name= "IsFinished", ComputedColumn = "case when EndDate is not null then cast(1 as bit) else cast(0 as bit) end" }, + new TableColumn() { Name= "IsTransferCompleted", ComputedColumn = "case when TransferCompletedDate is not null then cast(1 as bit) else cast(0 as bit) end" }, + + }; + LoadProcessTable = new CreateTableTask("etl.LoadProcess", lpColumns) { DisableLogging = true }; + } + + private void CreateStartProcessProcedure() { + StartProcess = new CRUDProcedureTask("etl.StartLoadProcess", $@"-- Create entry in etlLoadProcess + insert into etl.LoadProcess(StartDate, ProcessName, StartMessage, Source, IsRunning) + select getdate(),@ProcessName, @StartMessage,@Source, 1 as IsRunning + select @LoadProcessKey = SCOPE_IDENTITY()" + , new List() { + new ProcedureParameter("ProcessName","nvarchar(100)"), + new ProcedureParameter("StartMessage","nvarchar(4000)",""), + new ProcedureParameter("Source","nvarchar(20)",""), + new ProcedureParameter("LoadProcessKey","int") { Out = true } + }) { DisableLogging = true }; + } + + private void CreateTransferCompletedProcedure() { + TransferCompletedForProcess = new CRUDProcedureTask("etl.TransferCompletedForLoadProcess", $@"-- Set transfer completion date in load process + update etl.LoadProcess + set TransferCompletedDate = getdate() + where LoadProcessKey = @LoadProcessKey + " + , new List() { + new ProcedureParameter("LoadProcessKey","int") + }) { DisableLogging = true }; + } + + private void CreateEndProcessProcedure() { + EndProcess = new CRUDProcedureTask("etl.EndLoadProcess", $@"-- Set entry in etlLoadProcess to completed + update etl.LoadProcess + set EndDate = getdate() + , IsRunning = 0 + , WasSuccessful = 1 + , WasAborted = 0 + , EndMessage = @EndMessage + where LoadProcessKey = @LoadProcessKey + " + , new List() { + new ProcedureParameter("LoadProcessKey","int"), + new ProcedureParameter("EndMessage","nvarchar(4000)",""), + }) { DisableLogging = true }; + } + + private void CreateAbortProcessProcedure() { + AbortProcess = new CRUDProcedureTask("etl.AbortLoadProcess", $@"-- Set entry in etlLoadProcess to aborted + update etl.LoadProcess + set EndDate = getdate() + , IsRunning = 0 + , WasSuccessful = 0 + , WasAborted = 1 + , AbortMessage = @AbortMessage + where LoadProcessKey = @LoadProcessKey + " + , new List() { + new ProcedureParameter("LoadProcessKey","int"), + new ProcedureParameter("AbortMessage","nvarchar(4000)",""), + }) { DisableLogging = true }; + } + + + public static void CreateLog() => new CreateLogTablesTask().Execute(); + public string Sql => EtlSchema.Sql + Environment.NewLine + + LoadProcessTable.Sql + Environment.NewLine + + LogTable.Sql + Environment.NewLine + + StartProcess.Sql + Environment.NewLine + + EndProcess.Sql + Environment.NewLine + + AbortProcess.Sql + Environment.NewLine + + TransferCompletedForProcess.Sql + Environment.NewLine + ; + + private void ExecuteTasks() { + EtlSchema.ConnectionManager = this.ConnectionManager; + LogTable.ConnectionManager = this.ConnectionManager; + LoadProcessTable.ConnectionManager = this.ConnectionManager; + StartProcess.ConnectionManager = this.ConnectionManager; + EndProcess.ConnectionManager = this.ConnectionManager; + AbortProcess.ConnectionManager = 
this.ConnectionManager; + TransferCompletedForProcess.ConnectionManager = this.ConnectionManager; + EtlSchema.Execute(); + LogTable.Execute(); + LoadProcessTable.Execute(); + StartProcess.Execute(); + EndProcess.Execute(); + AbortProcess.Execute(); + TransferCompletedForProcess.Execute(); + } + + CreateTableTask LogTable { get; set; } + CreateTableTask LoadProcessTable { get; set; } + CreateSchemaTask EtlSchema { get; set; } + CRUDProcedureTask StartProcess { get; set; } + CRUDProcedureTask EndProcess { get; set; } + CRUDProcedureTask AbortProcess { get; set; } + CRUDProcedureTask TransferCompletedForProcess { get; set; } + } +} diff --git a/ETLBox/Toolbox/Logging/EndLoadProcessTask.cs b/ETLBox/Toolbox/Logging/EndLoadProcessTask.cs new file mode 100644 index 00000000..88e9501f --- /dev/null +++ b/ETLBox/Toolbox/Logging/EndLoadProcessTask.cs @@ -0,0 +1,49 @@ +namespace ALE.ETLBox { + public class EndLoadProcessTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "LOADPROCESS_END"; + public override string TaskName => $"End process with key {LoadProcessKey}"; + public override void Execute() { + new SqlTask(this, Sql) { DisableLogging = true }.ExecuteNonQuery(); + var rlp = new ReadLoadProcessTableTask(LoadProcessKey) { TaskType = this.TaskType, TaskHash = this.TaskHash, DisableLogging = true }; + rlp.Execute(); + ControlFlow.CurrentLoadProcess = rlp.LoadProcess; + } + + /* Public properties */ + public int? _loadProcessKey; + public int? LoadProcessKey { + get { + return _loadProcessKey ?? ControlFlow.CurrentLoadProcess?.LoadProcessKey; + } + set { + _loadProcessKey = value; + } + } + public string EndMessage { get; set; } + + + public string Sql => $@"EXECUTE etl.EndLoadProcess + @LoadProcessKey = '{LoadProcessKey}', + @EndMessage = {EndMessage.NullOrSqlString()}"; + + public EndLoadProcessTask() { + + } + + public EndLoadProcessTask(int? loadProcessKey) : this() { + this.LoadProcessKey = loadProcessKey; + } + public EndLoadProcessTask(int? loadProcessKey, string endMessage) : this(loadProcessKey) { + this.EndMessage = endMessage; + } + public EndLoadProcessTask(string endMessage) : this(null, endMessage) { } + + public static void End() => new EndLoadProcessTask().Execute(); + public static void End(int? loadProcessKey) => new EndLoadProcessTask(loadProcessKey).Execute(); + public static void End(int? loadProcessKey, string endMessage) => new EndLoadProcessTask(loadProcessKey, endMessage).Execute(); + public static void End(string endMessage) => new EndLoadProcessTask(null, endMessage).Execute(); + + + } +} diff --git a/ETLBox/Toolbox/Logging/GetLoadProcessAsJSONTask.cs b/ETLBox/Toolbox/Logging/GetLoadProcessAsJSONTask.cs new file mode 100644 index 00000000..468c1898 --- /dev/null +++ b/ETLBox/Toolbox/Logging/GetLoadProcessAsJSONTask.cs @@ -0,0 +1,38 @@ +using Newtonsoft.Json; +using Newtonsoft.Json.Serialization; +using System.Collections.Generic; + +namespace ALE.ETLBox { + public class GetLoadProcessAsJSONTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "LOADPROCESS_GETJSON"; + public override string TaskName => $"Get load process list as JSON"; + + public override void Execute() { + //TODO umschreiben in eine Zeile? 
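+            // (Translation of the TODO above: "rewrite as a single line?". A hypothetical one-liner using the
+            // static helper that already exists on ReadLoadProcessTableTask would be
+            //   JSON = JsonConvert.SerializeObject(ReadLoadProcessTableTask.ReadAll(), new JsonSerializerSettings { ... });
+            // with the same JsonSerializerSettings as below; the explicit ReadOption/Execute form is kept here.)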
+ var read = new ReadLoadProcessTableTask() { ReadOption = ReadOptions.ReadAllProcesses}; + read.Execute(); + List logEntries = read.AllLoadProcesses; + JSON = JsonConvert.SerializeObject(logEntries, new JsonSerializerSettings { + Formatting = Formatting.Indented, + ContractResolver = new CamelCasePropertyNamesContractResolver(), + NullValueHandling = NullValueHandling.Ignore + }); + + } + + public string JSON { get; private set; } + + public GetLoadProcessAsJSONTask Create() { + this.Execute(); + return this; + } + + public GetLoadProcessAsJSONTask() { + + } + + public static string GetJSON() => new GetLoadProcessAsJSONTask().Create().JSON; + + } +} diff --git a/ETLBox/Toolbox/Logging/GetLogAsJSONTask.cs b/ETLBox/Toolbox/Logging/GetLogAsJSONTask.cs new file mode 100644 index 00000000..9cf1d7a4 --- /dev/null +++ b/ETLBox/Toolbox/Logging/GetLogAsJSONTask.cs @@ -0,0 +1,84 @@ +using Newtonsoft.Json; +using Newtonsoft.Json.Serialization; +using System.Collections.Generic; +using System.Linq; + +namespace ALE.ETLBox { + public class GetLogAsJSONTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "LOG_GETJSON"; + public override string TaskName => $"Get log as JSON for {LoadProcessKey}"; + + public override void Execute() { + List logEntries = ReadLogTableTask.Read(LoadProcessKey); + CalculateEndDate(logEntries); + LogHierarchyEntry hierarchy = CreateHierarchyStructure(logEntries); + JSON = JsonConvert.SerializeObject(hierarchy, new JsonSerializerSettings { + Formatting = Formatting.Indented, + ContractResolver = new CamelCasePropertyNamesContractResolver(), + NullValueHandling = NullValueHandling.Ignore }); + } + + private void CalculateEndDate(List logEntries) { + foreach (var startEntry in logEntries.Where(entry => entry.TaskAction == "START")) { + var endEntry = logEntries.Where(entry => entry.TaskAction == "END" && entry.TaskHash == startEntry.TaskHash && entry.LogKey > startEntry.LogKey).FirstOrDefault(); + startEntry.EndDate = endEntry.LogDate; + } + } + + private LogHierarchyEntry CreateHierarchyStructure(List entries) { + LogHierarchyEntry root = new LogHierarchyEntry(new LogEntry() { TaskType = "ROOT" }); + var currentParent = root; + var currentList = root.Children; + foreach (LogEntry entry in entries) { + if (ContainerTypes.Contains(entry.TaskType.ToLower()) && entry.TaskAction == "START") { + var newEntry = new LogHierarchyEntry(entry) { Parent = currentParent }; + currentList.Add(newEntry); + currentParent = newEntry; + currentList = newEntry.Children; + } else if (ContainerTypes.Contains(entry.TaskType.ToLower()) && entry.TaskAction == "END") { + currentParent = currentParent.Parent; + currentList = currentParent.Children; + } else if (entry.TaskAction == "START" || entry.TaskAction == "LOG") { + var hierarchyEntry = new LogHierarchyEntry(entry) { Parent = currentParent }; + currentList.Add(hierarchyEntry); + } + } + return root; + } + + /* Public properties */ + public List ContainerTypes => new List() { "sequence", "subpackage", "package" }; + + public int? _loadProcessKey; + public int? LoadProcessKey { + get { + return _loadProcessKey ?? ControlFlow.CurrentLoadProcess?.LoadProcessKey; + } + set { + _loadProcessKey = value; + } + } + + public List LogEntryHierarchy { get; set; } + + public string JSON { get; private set; } + + public GetLogAsJSONTask Create() { + this.Execute(); + return this; + } + + public GetLogAsJSONTask() { + + } + + public GetLogAsJSONTask(int? 
loadProcessKey) : this() { + this.LoadProcessKey = loadProcessKey; + } + + public static string GetJSON() => new GetLogAsJSONTask().Create().JSON; + public static string GetJSON(int? loadProcessKey) => new GetLogAsJSONTask(loadProcessKey).Create().JSON; + + } +} diff --git a/ETLBox/Toolbox/Logging/LogTask.cs b/ETLBox/Toolbox/Logging/LogTask.cs new file mode 100644 index 00000000..fdf7cb3e --- /dev/null +++ b/ETLBox/Toolbox/Logging/LogTask.cs @@ -0,0 +1,35 @@ +namespace ALE.ETLBox { + public class LogTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "LOG"; + public override string TaskName => $"Logs message"; + public override void Execute() { + Info(Message); + } + + /* Public properties */ + public string Message { get; set; } + + public LogTask() { + NLogger = NLog.LogManager.GetLogger("ETL"); + } + + public LogTask(string message) : this() { + Message = message; + } + //NLogger.Info(TaskName, TaskType, "START", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + public void Trace() => NLogger?.Trace(Message, TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + public void Debug() => NLogger?.Debug(Message, TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + public void Info() => NLogger?.Info(Message, TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + public void Warn() => NLogger?.Warn(Message, TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + public void Error() => NLogger?.Error(Message, TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + public void Fatal() => NLogger?.Fatal(Message, TaskType, "LOG", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey); + public static void Trace(string message) => new LogTask(message).Trace(); + public static void Debug(string message) => new LogTask(message).Debug(); + public static void Info(string message) => new LogTask(message).Info(); + public static void Warn(string message) => new LogTask(message).Warn(); + public static void Error(string message) => new LogTask(message).Error(); + public static void Fatal(string message) => new LogTask(message).Fatal(); + NLog.Logger NLogger { get; set; } + } +} diff --git a/ETLBox/Toolbox/Logging/ReadLoadProcessTableTask.cs b/ETLBox/Toolbox/Logging/ReadLoadProcessTableTask.cs new file mode 100644 index 00000000..84da9f34 --- /dev/null +++ b/ETLBox/Toolbox/Logging/ReadLoadProcessTableTask.cs @@ -0,0 +1,115 @@ +using System; +using System.Collections.Generic; + +namespace ALE.ETLBox { + public class ReadLoadProcessTableTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "LOADPROCESS_READ"; + public override string TaskName => $"Read process with Key ({LoadProcessKey}) or without"; + public override void Execute() { + LoadProcess = new LoadProcess(); + var sql = new SqlTask(this, Sql) { + DisableLogging = true, + DisableExtension = true, + Actions = new List>() { + col => LoadProcess.LoadProcessKey = (int)col, + col => LoadProcess.StartDate = (DateTime)col, + col => LoadProcess.TransferCompletedDate = (DateTime?)col, + col => LoadProcess.EndDate = (DateTime?)col, + col => LoadProcess.ProcessName = (string)col, + col => LoadProcess.StartMessage = (string)col, + col => LoadProcess.IsRunning = (bool)col, + col => LoadProcess.EndMessage = 
(string)col, + col => LoadProcess.WasSuccessful = (bool)col, + col => LoadProcess.AbortMessage = (string)col, + col => LoadProcess.WasAborted= (bool)col, + col => LoadProcess.IsFinished= (bool)col, + col => LoadProcess.IsTransferCompleted= (bool)col + } + }; + if (ReadOption == ReadOptions.ReadAllProcesses) { + sql.BeforeRowReadAction = () => AllLoadProcesses = new List(); + sql.AfterRowReadAction = () => AllLoadProcesses.Add(LoadProcess); + } + sql.ExecuteReader(); + } + + /* Public properties */ + public int? _loadProcessKey; + public int? LoadProcessKey { + get { + return _loadProcessKey ?? ControlFlow.CurrentLoadProcess?.LoadProcessKey; + } + set { + _loadProcessKey = value; + } + } + public LoadProcess LoadProcess { get; private set; } + public List AllLoadProcesses { get; set; } + + public LoadProcess LastFinished { get; private set; } + public LoadProcess LastTransfered { get; private set; } + public ReadOptions ReadOption { get; set; } = ReadOptions.ReadSingleProcess; + + public string Sql { + get { + string top1 = ""; + if (ReadOption != ReadOptions.ReadAllProcesses) + top1 = "top 1"; + string sql = $@" +select {top1} LoadProcessKey, StartDate, TransferCompletedDate, EndDate, ProcessName, StartMessage, IsRunning, EndMessage, WasSuccessful, AbortMessage, WasAborted, IsFinished, IsTransferCompleted +from etl.LoadProcess "; + if (ReadOption == ReadOptions.ReadSingleProcess) + sql += $@"where LoadProcessKey = {LoadProcessKey}"; + else if (ReadOption == ReadOptions.ReadLastFinishedProcess) + sql += $@"where IsFinished = 1 +order by EndDate desc, LoadProcessKey desc"; + else if (ReadOption == ReadOptions.ReadLastSuccessful) + sql += $@"where WasSuccessful = 1 +order by EndDate desc, LoadProcessKey desc"; + else if (ReadOption == ReadOptions.ReadLastAborted) + sql += $@"where WasAborted = 1 +order by EndDate desc, LoadProcessKey desc"; + else if (ReadOption == ReadOptions.ReadLastTransferedProcess) + sql += $@"where IsTransferCompleted = 1 +order by TransferCompletedDate desc, +LoadProcessKey desc"; + + return sql; + } + } + + public ReadLoadProcessTableTask() { + + } + public ReadLoadProcessTableTask(int? loadProcessKey) : this(){ + this.LoadProcessKey = loadProcessKey; + } + + public static LoadProcess Read(int? loadProcessKey) { + var sql = new ReadLoadProcessTableTask(loadProcessKey); + sql.Execute(); + return sql.LoadProcess; + } + public static List ReadAll() { + var sql = new ReadLoadProcessTableTask() { ReadOption = ReadOptions.ReadAllProcesses }; + sql.Execute(); + return sql.AllLoadProcesses; + } + + public static LoadProcess ReadWithOption(ReadOptions option) { + var sql = new ReadLoadProcessTableTask() { ReadOption = option }; + sql.Execute(); + return sql.LoadProcess; + } + } + + public enum ReadOptions { + ReadSingleProcess, + ReadAllProcesses, + ReadLastFinishedProcess, + ReadLastTransferedProcess, + ReadLastSuccessful, + ReadLastAborted + } +} diff --git a/ETLBox/Toolbox/Logging/ReadLogTableTask.cs b/ETLBox/Toolbox/Logging/ReadLogTableTask.cs new file mode 100644 index 00000000..da1bf973 --- /dev/null +++ b/ETLBox/Toolbox/Logging/ReadLogTableTask.cs @@ -0,0 +1,67 @@ +using System; +using System.Collections.Generic; + +namespace ALE.ETLBox { + public class ReadLogTableTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "LOG_READLOG"; + public override string TaskName => $"Read all log entries for {LoadProcessKey ?? 
0 }"; + public override void Execute() { + LogEntries = new List(); + LogEntry current = new LogEntry(); + new SqlTask(this, Sql) { + DisableLogging = true, + DisableExtension = true, + BeforeRowReadAction = () => current = new LogEntry(), + AfterRowReadAction = () => LogEntries.Add(current), + Actions = new List>() { + col => current.LogKey = (int)col, + col => current.LogDate = (DateTime)col, + col => current.Level = (string)col, + col => current.Message = (string)col, + col => current.TaskType = (string)col, + col => current.TaskAction = (string)col, + col => current.TaskHash = (string)col, + col => current.Stage = (string)col, + col => current.Source = (string)col, + col => current.LoadProcessKey = (int?)col, + } + }.ExecuteReader(); + } + + /* Public properties */ + public int? _loadProcessKey; + public int? LoadProcessKey { + get { + return _loadProcessKey ?? ControlFlow.CurrentLoadProcess?.LoadProcessKey; + } + set { + _loadProcessKey = value; + } + } + + public ReadLogTableTask ReadLog() { + Execute(); + return this; + } + + public List LogEntries { get; private set; } + + public string Sql => $@"select LogKey, LogDate, Level, Message, TaskType, TaskAction, TaskHash, Stage, Source, LoadProcessKey + from etl.Log" + + (LoadProcessKey != null ? $@" where LoadProcessKey = {LoadProcessKey}" + : ""); + + public ReadLogTableTask() { + + } + + public ReadLogTableTask(int? loadProcessKey) : this() { + this.LoadProcessKey = loadProcessKey; + } + + public static List Read() => new ReadLogTableTask().ReadLog().LogEntries; + public static List Read(int? loadProcessKey) => new ReadLogTableTask(loadProcessKey).ReadLog().LogEntries; + + } +} diff --git a/ETLBox/Toolbox/Logging/RemoveLogTablesTask.cs b/ETLBox/Toolbox/Logging/RemoveLogTablesTask.cs new file mode 100644 index 00000000..47da3167 --- /dev/null +++ b/ETLBox/Toolbox/Logging/RemoveLogTablesTask.cs @@ -0,0 +1,21 @@ +namespace ALE.ETLBox { + public class RemoveLogTablesTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "REMOVELOG"; + public override string TaskName => $"Remove log tables"; + public override void Execute() => new SqlTask(this, Sql).ExecuteNonQuery(); + public string Sql => $@" +if (object_id('etl.Log') is not null) drop table etl.Log +if (object_id('etl.LoadProcess') is not null) drop table etl.LoadProcess +if (object_id('etl.AbortLoadProcess') is not null) drop procedure etl.AbortLoadProcess +if (object_id('etl.EndLoadProcess') is not null) drop procedure etl.EndLoadProcess +if (object_id('etl.StartLoadProcess') is not null) drop procedure etl.StartLoadProcess +if (object_id('etl.TransferCompletedForLoadProcess') is not null) drop procedure etl.TransferCompletedForLoadProcess +"; + + public RemoveLogTablesTask() { } + public static void Remove() => new RemoveLogTablesTask().Execute(); + + + } +} diff --git a/ETLBox/Toolbox/Logging/StartLoadProcessTask.cs b/ETLBox/Toolbox/Logging/StartLoadProcessTask.cs new file mode 100644 index 00000000..138af952 --- /dev/null +++ b/ETLBox/Toolbox/Logging/StartLoadProcessTask.cs @@ -0,0 +1,59 @@ +namespace ALE.ETLBox { + public class StartLoadProcessTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "LOADPROCESS_START"; + public override string TaskName => $"Start load process {ProcessName}"; + public override void Execute() { + LoadProcessKey = new SqlTask(this, Sql) { DisableLogging = true }.ExecuteScalar(); + var rlp = new ReadLoadProcessTableTask(LoadProcessKey) { TaskType = 
this.TaskType, TaskHash = this.TaskHash, DisableLogging = true }; + rlp.Execute(); + ControlFlow.CurrentLoadProcess = rlp.LoadProcess; + } + + /* Public properties */ + public string ProcessName { get; set; } = "N/A"; + public string StartMessage { get; set; } + public string Source { get; set; } + + public int? _loadProcessKey; + public int? LoadProcessKey { + get { + return _loadProcessKey ?? ControlFlow.CurrentLoadProcess?.LoadProcessKey; + } + set { + _loadProcessKey = value; + } + } + + public string Sql => $@" + declare @LoadProcessKey int + EXECUTE etl.StartLoadProcess + @ProcessName = '{ProcessName}', + @StartMessage = {StartMessage.NullOrSqlString()}, + @Source = {Source.NullOrSqlString()}, + @LoadProcessKey = @LoadProcessKey OUTPUT + SELECT @LoadProcessKey"; + + + public StartLoadProcessTask() { + + } + public StartLoadProcessTask(string processName) : this(){ + this.ProcessName = processName; + } + public StartLoadProcessTask(string processName,string startMessage) : this(processName) { + this.StartMessage = startMessage; + } + + public StartLoadProcessTask(string processName, string startMessage, string source) : this(processName, startMessage) { + this.Source = source; + } + + public static void Start(string processName) => new StartLoadProcessTask(processName).Execute(); + public static void Start(string processName, string startMessage) => new StartLoadProcessTask(processName, startMessage).Execute(); + + public static void Start(string processName, string startMessage, string source) => new StartLoadProcessTask(processName, startMessage, source).Execute(); + + + } +} diff --git a/ETLBox/Toolbox/Logging/TransferCompletedForLoadProcessTask.cs b/ETLBox/Toolbox/Logging/TransferCompletedForLoadProcessTask.cs new file mode 100644 index 00000000..7f3acf71 --- /dev/null +++ b/ETLBox/Toolbox/Logging/TransferCompletedForLoadProcessTask.cs @@ -0,0 +1,40 @@ +namespace ALE.ETLBox { + public class TransferCompletedForLoadProcessTask : GenericTask, ITask { + /* ITask Interface */ + public override string TaskType { get; set; } = "TRANSFERCOMPLETE"; + public override string TaskName => $"Set transfer completed for {LoadProcessKey}"; + public override void Execute() { + new SqlTask(this, Sql).ExecuteNonQuery(); + var rlp = new ReadLoadProcessTableTask(LoadProcessKey) { TaskType = this.TaskType, TaskHash = this.TaskHash, DisableLogging = true }; + rlp.Execute(); + ControlFlow.CurrentLoadProcess = rlp.LoadProcess; + } + + /* Public properties */ + public int? _loadProcessKey; + public int? LoadProcessKey { + get { + return _loadProcessKey ?? ControlFlow.CurrentLoadProcess?.LoadProcessKey; + } + set { + _loadProcessKey = value; + } + } + + public string Sql => $@"EXECUTE etl.TransferCompletedForLoadProcess + @LoadProcessKey = '{LoadProcessKey}'"; + + public TransferCompletedForLoadProcessTask() { + + } + + public TransferCompletedForLoadProcessTask(int? loadProcessKey) : this() { + this.LoadProcessKey = loadProcessKey; + } + + public static void Complete() => new TransferCompletedForLoadProcessTask().Execute(); + public static void Complete(int? 
loadProcessKey) => new TransferCompletedForLoadProcessTask(loadProcessKey).Execute(); + + + } +} diff --git a/ETLBox/packages.config b/ETLBox/packages.config new file mode 100644 index 00000000..149beabc --- /dev/null +++ b/ETLBox/packages.config @@ -0,0 +1,7 @@ + + + + + + + \ No newline at end of file diff --git a/ETLBoxTest/App.config b/ETLBoxTest/App.config new file mode 100644 index 00000000..4381ff76 --- /dev/null +++ b/ETLBoxTest/App.config @@ -0,0 +1,50 @@ + + + +
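+  <!-- Presumably the commandText of the NLog database target: it maps the NLog event properties
+       (@LogDate, @Level, @Stage, @Message, @Type, @Action, @Hash, @Logger, @LoadProcessKey) onto the
+       etl.Log table created by CreateLogTablesTask, so each LogTask/NLog call becomes one log row. -->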
+ + + + + + + + + + + + + + insert into etl.Log (LogDate, Level, Stage, Message, TaskType, TaskAction, TaskHash, Source, LoadProcessKey) + select @LogDate + , @Level + , cast(@Stage as nvarchar(20)) + , cast(@Message as nvarchar(4000)) + , cast(@Type as nvarchar(40)) + , @Action + , @Hash + , cast(@Logger as nvarchar(20)) + , case when @LoadProcessKey=0 then null else @LoadProcessKey end + + + + + + + + + + + + + + + + + + + diff --git a/ETLBoxTest/ControlFlow/TestCustomTask.cs b/ETLBoxTest/ControlFlow/TestCustomTask.cs new file mode 100644 index 00000000..05330d50 --- /dev/null +++ b/ETLBoxTest/ControlFlow/TestCustomTask.cs @@ -0,0 +1,64 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestCustomTask { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + } + + public bool Action1Executed { get; set; } + + [TestMethod] + public void TestCustomTask1() { + CustomTask.Execute("Test custom task 1", Action1); + Assert.IsTrue(Action1Executed); + } + + public void Action1() { + Action1Executed = true; + } + + public int Action2Value { get; set; } + + [TestMethod] + public void TestCustomTask2() { + CustomTask.Execute("Test custom task 2", Action2, 5); + Assert.AreEqual(5, Action2Value); + } + + public void Action2(int param1) { + Action2Value = param1; + } + + public string Action3Value1 { get; set; } + public bool Action3Value2 { get; set; } + + [TestMethod] + public void TestCustomTask3() { + CustomTask.Execute("Test custom task 3", Action3, "t",true); + Assert.AreEqual("t", Action3Value1); + Assert.IsTrue(Action3Value2); + } + + public void Action3(string param1, bool param2) { + Action3Value1 = param1; + Action3Value2 = param2; + } + + [TestMethod] + public void TestLogging() { + CreateLogTablesTask.CreateLog(); + CustomTask.Execute("Test custom task 4", Action1); + Assert.AreEqual(2, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='CUSTOM' group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + } + + } +} diff --git a/ETLBoxTest/ControlFlow/TestSequence.cs b/ETLBoxTest/ControlFlow/TestSequence.cs new file mode 100644 index 00000000..5c942f96 --- /dev/null +++ b/ETLBoxTest/ControlFlow/TestSequence.cs @@ -0,0 +1,53 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestSequence { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + } + + public bool Action1Executed { get; set; } + + [TestMethod] + public void TestSequence1() { + Sequence.Execute("Test sequence 1", Action1); + Assert.IsTrue(Action1Executed); + } + + public void 
Action1() { + Action1Executed = true; + } + + public bool Action2Executed { get; set; } + + [TestMethod] + public void TestSequence2() { + string test = "Test"; + Sequence.Execute("Test sequence 2", Action2, test); + Assert.IsTrue(Action2Executed); + } + + public void Action2(object parent) { + Action2Executed = true; + Assert.AreEqual("Test", parent); + } + + [TestMethod] + public void TestLogging() { + CreateLogTablesTask.CreateLog(); + Sequence.Execute("Test sequence 3", Action1); + Assert.AreEqual(2, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='SEQUENCE' group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + } + + + + } +} diff --git a/ETLBoxTest/Cube/TestDropCubeTask.cs b/ETLBoxTest/Cube/TestDropCubeTask.cs new file mode 100644 index 00000000..cfaf72a0 --- /dev/null +++ b/ETLBoxTest/Cube/TestDropCubeTask.cs @@ -0,0 +1,32 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestDropCubeTask { + public TestContext TestContext { get; set; } + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateCube(testContext); + string connectionString = testContext.Properties["connectionString"].ToString(); + ControlFlow.CurrentASConnection = new ASConnectionManager(new ConnectionString(connectionString)); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(connectionString)); + } + + [TestMethod] + public void TestDropCube() { + DropCubeTask.Execute("Drop cube"); + } + + [TestMethod] + public void TestLogging() { + TestHelper.RecreateDatabase(TestContext); + CreateLogTablesTask.CreateLog(); + DropCubeTask.Execute("Drop cube"); + Assert.AreEqual(2, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='DROPCUBE' group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + } + + + } +} diff --git a/ETLBoxTest/Cube/TestProcessCubeTask.cs b/ETLBoxTest/Cube/TestProcessCubeTask.cs new file mode 100644 index 00000000..a91b6eba --- /dev/null +++ b/ETLBoxTest/Cube/TestProcessCubeTask.cs @@ -0,0 +1,33 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestProcessCubeTask { + public TestContext TestContext { get; set; } + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateCube(testContext); + string connectionString = testContext.Properties["connectionString"].ToString(); + ControlFlow.CurrentASConnection = new ASConnectionManager(new ConnectionString(connectionString)); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(connectionString)); + } + + [TestMethod] + public void TestProcess() { + ProcessCubeTask.Process("Process cube test"); + } + + [TestMethod] + public void TestLogging() { + TestHelper.RecreateDatabase(TestContext); + CreateLogTablesTask.CreateLog(); + ProcessCubeTask.Process("Process cube test"); + Assert.AreEqual(2, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='PROCESSCUBE' group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + } + + + + } +} diff --git a/ETLBoxTest/Cube/TestXMLATask.cs b/ETLBoxTest/Cube/TestXMLATask.cs new file mode 100644 index 00000000..740b6772 --- /dev/null +++ b/ETLBoxTest/Cube/TestXMLATask.cs @@ -0,0 +1,29 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestXMLATask { 
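+        // Exercises raw XMLA against Analysis Services: TestInit builds an AdomdConnectionManager from the
+        // test connection string without an initial catalog, and TestCreateDelete drops any leftover cube
+        // database (ignoring errors), then creates and deletes it again via TestHelper.CreateCubeXMLA /
+        // TestHelper.DeleteCubeXMLA.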
+ public TestContext TestContext { get; set; } + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + string connectionString = testContext.Properties["connectionString"].ToString(); + ControlFlow.CurrentAdomdConnection = new AdomdConnectionManager(new ConnectionString(connectionString).GetConnectionWithoutCatalog()); + } + + + [TestMethod] + public void TestCreateDelete() { + string dbName = TestContext.Properties["dbName"].ToString(); + try { + XmlaTask.ExecuteNonQuery("Drop cube", TestHelper.DeleteCubeXMLA(dbName)); + } + catch { } + XmlaTask.ExecuteNonQuery("Create cube", TestHelper.CreateCubeXMLA(dbName)); + XmlaTask.ExecuteNonQuery("Delete cube", TestHelper.DeleteCubeXMLA(dbName)); + } + + + } +} diff --git a/ETLBoxTest/DataFlow/Simple_CSV2DB.csv b/ETLBoxTest/DataFlow/Simple_CSV2DB.csv new file mode 100644 index 00000000..344489a0 --- /dev/null +++ b/ETLBoxTest/DataFlow/Simple_CSV2DB.csv @@ -0,0 +1,4 @@ +Header1,Header2 +ValueRow1,100 +ValueRow2,0 +ValueRow3,300 \ No newline at end of file diff --git a/ETLBoxTest/DataFlow/TestDataFlowBigData.cs b/ETLBoxTest/DataFlow/TestDataFlowBigData.cs new file mode 100644 index 00000000..ad3c9da1 --- /dev/null +++ b/ETLBoxTest/DataFlow/TestDataFlowBigData.cs @@ -0,0 +1,72 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; +using System.Diagnostics; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestDataFlowBigData { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void ClassInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestInitialize] + public void TestInit() { + CleanUpSchemaTask.CleanUp("test"); + } + + /* + * CSVSource (out: string[]) -> DBDestination (in: string[]) + * Table without key columns (HEAP) + * 1.000.000 Rows with 8007 bytes per Row (8000 bytes data + 7 bytes for sql server) + */ + [TestMethod] + public void BigData_CSV_DB() { + BigData_CSV_DB(100000); + } + + public void BigData_CSV_DB(int numberOfRows) { + Stopwatch watch = new Stopwatch(); + TableDefinition stagingTable = new TableDefinition("test.Staging", new List() { + new TableColumn("Col1", "nchar(1000)", allowNulls: false), + new TableColumn("Col2", "nchar(1000)", allowNulls: false), + new TableColumn("Col3", "nchar(1000)", allowNulls: false), + new TableColumn("Col4", "nchar(1000)", allowNulls: false), + }); + stagingTable.CreateTable(); + string fileName = "DataFlow/BigData_CSV2DB.csv"; + BigDataHelper bigData = new BigDataHelper() { + FileName = fileName, + NumberOfRows = numberOfRows, + TableDefinition = stagingTable + }; + watch.Start(); + LogTask.Info($"Create .csv file {fileName} with {numberOfRows} Rows"); + bigData.CreateBigDataCSV(); + LogTask.Info($"Needed {watch.Elapsed.TotalMinutes} to create .csv file"); + watch.Reset(); + + CSVSource source = new CSVSource(fileName); + DBDestination dest = new DBDestination(1000) { DestinationTableDefinition = stagingTable }; + source.LinkTo(dest); + + watch.Start(); + source.Execute(); + LogTask.Info($"Needed {watch.Elapsed.TotalMinutes} to read everything into memory (while constantly writing)"); + 
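+            // source.Execute() returns once the CSV has been read, but DBDestination(1000) is still flushing
+            // 1000-row batches in the background; the quick-query count below shows how many rows have already
+            // reached test.Staging, and dest.Wait() afterwards blocks until the last batch is committed.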
LogTask.Info($"Already {RowCountTask.Count("test.Staging", RowCountOptions.QuickQueryMode)} inserted into table"); + dest.Wait(); //TODO Wait should be part of source + LogTask.Info($"Needed {watch.Elapsed.TotalMinutes} to write everything into database"); + + Assert.AreEqual(numberOfRows, SqlTask.ExecuteScalar("Check staging table", $"select count(*) from test.Staging")); + } + + } + +} diff --git a/ETLBoxTest/DataFlow/TestDataFlowBlockTransformation.cs b/ETLBoxTest/DataFlow/TestDataFlowBlockTransformation.cs new file mode 100644 index 00000000..4d614c8c --- /dev/null +++ b/ETLBoxTest/DataFlow/TestDataFlowBlockTransformation.cs @@ -0,0 +1,77 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; +using System.Linq; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestDataFlowBlockTransformation { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void ClassInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestInitialize] + public void TestInit() { + CleanUpSchemaTask.CleanUp("test"); + } + + public class MySimpleRow { + public string Value1 { get; set; } + public int Value2 { get; set; } + } + + + /* + * DSBSource (out: object) -> BlockTransformation (in/out: object) -> DBDestination (in: object) + */ + [TestMethod] + public void DB_BlockTrans_DB() { + TableDefinition sourceTableDefinition = CreateSourceTable("test.Source"); + TableDefinition destinationTableDefinition = CreateDestinationTable("test.Destination"); + + DBSource source = new DBSource(sourceTableDefinition); + DBDestination dest = new DBDestination(destinationTableDefinition); + BlockTransformation block = new BlockTransformation( + inputData => { + return inputData.Select( row => new MySimpleRow() { Value1 = row.Value1, Value2 = 3 }).ToList(); + }); + source.LinkTo(block); + block.LinkTo(dest); + source.Execute(); + dest.Wait(); + Assert.AreEqual(3, RowCountTask.Count("test.Destination","Col2 in (3)")); + } + + private static TableDefinition CreateSourceTable(string tableName) { + TableDefinition sourceTableDefinition = new TableDefinition(tableName, new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: false), + new TableColumn("Col2", "int", allowNulls: true) + }); + sourceTableDefinition.CreateTable(); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test1',1)"); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test2',2)"); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test3',3)"); + return sourceTableDefinition; + } + + private static TableDefinition CreateDestinationTable(string tableName) { + TableDefinition destinationTableDefinition = new TableDefinition(tableName, new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: false), + new TableColumn("Col2", "int", allowNulls: true) + }); + destinationTableDefinition.CreateTable(); + return destinationTableDefinition; + } + + + + } + +} diff --git a/ETLBoxTest/DataFlow/TestDataFlowCSVSource.cs b/ETLBoxTest/DataFlow/TestDataFlowCSVSource.cs new file mode 100644 index 
00000000..b0b136b8 --- /dev/null +++ b/ETLBoxTest/DataFlow/TestDataFlowCSVSource.cs @@ -0,0 +1,109 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestDataFlowCSVSource { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void ClassInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestInitialize] + public void TestInit() { + CleanUpSchemaTask.CleanUp("test"); + } + /* + * CSVSource (out: string[]) -> DBDestination (in: string[]) + * Table without key columns + */ + [TestMethod] + public void CSV_DB() { + TableDefinition stagingTable = new TableDefinition("test.Staging", new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: false), + new TableColumn("Col2", "int", allowNulls: true) + }); + stagingTable.CreateTable(); + CSVSource source = new CSVSource("DataFlow/Simple_CSV2DB.csv"); + DBDestination dest = new DBDestination() { DestinationTableDefinition = stagingTable }; + source.LinkTo(dest); + + source.Execute(); + dest.Wait(); //TODO Wait should be part of source + + Assert.AreEqual(3, SqlTask.ExecuteScalar("Check staging table", $"select count(*) from test.Staging where Col1 Like '%ValueRow%' and Col2 <> 1")); + } + + /* + * CSVSource (out: string[]) -> DBDestination (in: string[]) + * Table with key column (at different positions) + */ + [TestMethod] + public void CSV_DB_WithKeyPosition1() { + CSV_DB_WithKey(0); + } + + [TestMethod] + public void CSV_DB_WithKeyPosition2() { + CSV_DB_WithKey(1); + } + + [TestMethod] + public void CSV_DB_WithKeyPosition3() { + CSV_DB_WithKey(2); + } + + public void CSV_DB_WithKey(int keyPosition) { + List columns = new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: false), + new TableColumn("Col2", "int", allowNulls: true) }; + columns.Insert(keyPosition, new TableColumn("Key", "int", allowNulls: false, isPrimaryKey: true) { IsIdentity = true }); + TableDefinition stagingTable = new TableDefinition($"test.Staging{keyPosition}", columns); + stagingTable.CreateTable(); + CSVSource source = new CSVSource("DataFlow/Simple_CSV2DB.csv"); + DBDestination dest = new DBDestination() { DestinationTableDefinition = stagingTable }; + source.LinkTo(dest); + + source.Execute(); + dest.Wait(); //TODO Wait should be part of source + + Assert.AreEqual(3, SqlTask.ExecuteScalar("Check staging table", $"select count(*) from test.Staging{keyPosition} where Col1 Like '%ValueRow%' and Col2 <> 1")); + } + + /* + * CSVSource (out: string[]) -> DBDestination (in: string[]) + */ + [TestMethod] + public void CSV_DB_WithBatchChanges() { + TableDefinition stagingTable = new TableDefinition("test.Staging", new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: false), + new TableColumn("Col2", "int", allowNulls: true) + }); + stagingTable.CreateTable(); + CSVSource source = new CSVSource("DataFlow/Simple_CSV2DB.csv"); + DBDestination dest = new DBDestination(batchSize: 2) { + DestinationTableDefinition = stagingTable, + BeforeBatchWrite = + rowArray => { + rowArray[0][0] = "NewValue"; + return rowArray; + } 
+ }; + source.LinkTo(dest); + + source.Execute(); + dest.Wait(); //TODO Wait should be part of source + + Assert.AreEqual(1, SqlTask.ExecuteScalar("Check staging table", $"select count(*) from test.Staging where Col1 Like '%ValueRow%' and Col2 <> 1")); + Assert.AreEqual(2, SqlTask.ExecuteScalar("Check staging table", $"select count(*) from test.Staging where Col1 = 'NewValue'")); + } + } + +} diff --git a/ETLBoxTest/DataFlow/TestDataFlowCustomDestination.cs b/ETLBoxTest/DataFlow/TestDataFlowCustomDestination.cs new file mode 100644 index 00000000..d4cf36dd --- /dev/null +++ b/ETLBoxTest/DataFlow/TestDataFlowCustomDestination.cs @@ -0,0 +1,74 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestDataFlowCustomDestination { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void ClassInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestInitialize] + public void TestInit() { + CleanUpSchemaTask.CleanUp("test"); + } + + public class MySimpleRow { + public string Value1 { get; set; } + public int Value2 { get; set; } + } + + /* + * DSBSource (out: object) -> CustomDestination (in: object) + */ + [TestMethod] + public void DB_CustDest() { + TableDefinition sourceTableDefinition = CreateSourceTable("test.Source"); + TableDefinition destinationTableDefinition = CreateDestinationTable("test.Destination"); + + DBSource source = new DBSource() { SourceTableDefinition = sourceTableDefinition }; + CustomDestination dest = new CustomDestination( + row => { + SqlTask.ExecuteNonQuery("Insert row", $"insert into test.Destination values('{row.Value1}',{row.Value2})"); + } + ); + source.LinkTo(dest); + source.Execute(); + dest.Wait(); + Assert.AreEqual(3, SqlTask.ExecuteScalar("Check destination table", "select count(*) from test.Destination")); + } + + private static TableDefinition CreateSourceTable(string tableName) { + TableDefinition sourceTableDefinition = new TableDefinition(tableName, new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: false), + new TableColumn("Col2", "int", allowNulls: true) + }); + sourceTableDefinition.CreateTable(); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test1',1)"); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test2',2)"); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test3',3)"); + return sourceTableDefinition; + } + + private static TableDefinition CreateDestinationTable(string tableName) { + TableDefinition destinationTableDefinition = new TableDefinition(tableName, new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: false), + new TableColumn("Col2", "int", allowNulls: true) + }); + destinationTableDefinition.CreateTable(); + return destinationTableDefinition; + } + + + + } + +} diff --git a/ETLBoxTest/DataFlow/TestDataFlowCustomSource.cs b/ETLBoxTest/DataFlow/TestDataFlowCustomSource.cs new file mode 100644 index 00000000..c9f8b103 --- /dev/null +++ b/ETLBoxTest/DataFlow/TestDataFlowCustomSource.cs 
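Every data flow test in this patch follows the same lifecycle: build the components, chain them with LinkTo, call Execute on the source and then Wait on each destination (the //TODO comments note that the wait is not yet folded into the source). A condensed sketch of the batch-rewrite variant exercised in CSV_DB_WithBatchChanges above; the List<TableColumn> type argument is an assumption, since this diff rendering appears to have stripped angle-bracket generics throughout:

    TableDefinition stagingTable = new TableDefinition("test.Staging", new List<TableColumn>() {
        new TableColumn("Col1", "nvarchar(100)", allowNulls: false),
        new TableColumn("Col2", "int", allowNulls: true)
    });
    stagingTable.CreateTable();                                       // create the target table first

    CSVSource source = new CSVSource("DataFlow/Simple_CSV2DB.csv");   // emits one string[] per CSV row
    DBDestination dest = new DBDestination(batchSize: 2) {
        DestinationTableDefinition = stagingTable,
        BeforeBatchWrite = batch => {                                 // invoked once per buffered batch
            batch[0][0] = "NewValue";                                 // rows can be rewritten (or filtered) here
            return batch;                                             // whatever is returned is what gets bulk-inserted
        }
    };

    source.LinkTo(dest);
    source.Execute();                                                 // push all rows into the pipeline
    dest.Wait();                                                      // block until the last batch is flushed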
@@ -0,0 +1,75 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestDataFlowCustomSource { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void ClassInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestInitialize] + public void TestInit() { + CleanUpSchemaTask.CleanUp("test"); + } + + public class MySimpleRow { + public string Value1 { get; set; } + public int Value2 { get; set; } + } + + public class CustomRowReader { + public List Data { get; set; } = new List() { "Test1", "Test2", "Test3" }; + public int _readIndex = 0; + public MySimpleRow ReadData() { + var result = new MySimpleRow() { + Value1 = Data[_readIndex], + Value2 = _readIndex + }; + _readIndex++; + return result; + } + + public bool EndOfData() { + return _readIndex >= Data.Count; + } + } + + /* + * CustomSource (out: object) -> DBDestination (in: object) + */ + [TestMethod] + public void CustSource_DB() { + TableDefinition destinationTableDefinition = CreateDestinationTable("test.Destination"); + + CustomRowReader rowReaderClass = new CustomRowReader(); + CustomSource source = new CustomSource(rowReaderClass.ReadData, rowReaderClass.EndOfData); + DBDestination dest = new DBDestination(destinationTableDefinition); + source.LinkTo(dest); + source.Execute(); + dest.Wait(); + Assert.AreEqual(3, SqlTask.ExecuteScalar("Check destination table", "select count(*) from test.Destination")); + } + + private static TableDefinition CreateDestinationTable(string tableName) { + TableDefinition destinationTableDefinition = new TableDefinition(tableName, new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: false), + new TableColumn("Col2", "int", allowNulls: true) + }); + destinationTableDefinition.CreateTable(); + return destinationTableDefinition; + } + + + + } + +} diff --git a/ETLBoxTest/DataFlow/TestDataFlowDBSource.cs b/ETLBoxTest/DataFlow/TestDataFlowDBSource.cs new file mode 100644 index 00000000..861b342b --- /dev/null +++ b/ETLBoxTest/DataFlow/TestDataFlowDBSource.cs @@ -0,0 +1,60 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestDataFlowDBSource { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void ClassInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestInitialize] + public void TestInit() { + CleanUpSchemaTask.CleanUp("test"); + } + + public class MySimpleRow { + public string Value1 { get; set; } + public int Value2 { get; set; } + } + + /* + * DSBSource (out: object) -> DBDestination (in: object) + */ + [TestMethod] + public void 
DB_DB() { + TableDefinition sourceTableDefinition = new TableDefinition("test.Source", new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: false), + new TableColumn("Col2", "int", allowNulls: true) + }); + sourceTableDefinition.CreateTable(); + SqlTask.ExecuteNonQuery("Insert demo data", "insert into test.Source values('Test1',1)"); + SqlTask.ExecuteNonQuery("Insert demo data", "insert into test.Source values('Test2',2)"); + SqlTask.ExecuteNonQuery("Insert demo data", "insert into test.Source values('Test3',3)"); + + TableDefinition destinationTableDefinition = new TableDefinition("test.Destination", new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: false), + new TableColumn("Col2", "int", allowNulls: true) + }); + destinationTableDefinition.CreateTable(); + + DBSource source = new DBSource() { SourceTableDefinition = sourceTableDefinition }; + DBDestination dest = new DBDestination() { DestinationTableDefinition = destinationTableDefinition }; + source.LinkTo(dest); + source.Execute(); + dest.Wait(); + Assert.AreEqual(3, SqlTask.ExecuteScalar("Check destination table", "select count(*) from test.Destination")); + } + + + } + +} diff --git a/ETLBoxTest/DataFlow/TestDataFlowLookup.cs b/ETLBoxTest/DataFlow/TestDataFlowLookup.cs new file mode 100644 index 00000000..bba64dbf --- /dev/null +++ b/ETLBoxTest/DataFlow/TestDataFlowLookup.cs @@ -0,0 +1,115 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; +using System.Linq; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestDataFlowLookup { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void ClassInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestInitialize] + public void TestInit() { + CleanUpSchemaTask.CleanUp("test"); + } + + public class MyLookupRow { + public int Key { get; set; } + public string LookupValue { get; set; } + } + + public class MyInputDataRow { + public string Value1 { get; set; } + public int Value2 { get; set; } + } + + public class MyOutputDataRow { + public string Value1 { get; set; } + public string LookupValue2 { get; set; } + } + + /* + * DBSource (out: MyInputDataRow) + * -> Lookup (in: MyInputDataRow, out: MyOutputDataRow, lookup: DBSource(out: MyLooupRow) ) + * -> DBDestination (in: MyOutputDataRow) + */ + [TestMethod] + public void DB_Lookup_DB() { + TableDefinition sourceTableDefinition = CreateDBSourceTableForInputRow(); + TableDefinition destinationTableDefinition = CreateDBDestinationTableForOutputRow(); + TableDefinition lookupTableDefinition = CreateDBLookupTable(); + + TransformationTestClass testClass = new TransformationTestClass(); + DBSource source = new DBSource() { SourceTableDefinition = sourceTableDefinition }; + DBSource lookupSource = new DBSource() { SourceTableDefinition = lookupTableDefinition }; + Lookup lookup = new Lookup( + testClass.TestTransformationFunc, lookupSource, testClass.LookupData + ); + DBDestination dest = new DBDestination() { DestinationTableDefinition = destinationTableDefinition }; + source.LinkTo(lookup); + lookup.LinkTo(dest); + source.Execute(); + 
dest.Wait(); + Assert.AreEqual(1, SqlTask.ExecuteScalar("Check destination table", "select count(*) from test.Destination where Col1 = 'Test1' and Col2 = 'Lookup for 1'")); + Assert.AreEqual(1, SqlTask.ExecuteScalar("Check destination table", "select count(*) from test.Destination where Col1 = 'Test2' and Col2 = 'Lookup for 2'")); + Assert.AreEqual(1, SqlTask.ExecuteScalar("Check destination table", "select count(*) from test.Destination where Col1 = 'Test3' and Col2 = 'Lookup for 3'")); + } + + internal TableDefinition CreateDBSourceTableForInputRow() { + TableDefinition sourceTableDefinition = new TableDefinition("test.Source", new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: false), + new TableColumn("Col2", "int", allowNulls: true) + }); + sourceTableDefinition.CreateTable(); + SqlTask.ExecuteNonQuery("Insert demo data", "insert into test.Source values('Test1',1)"); + SqlTask.ExecuteNonQuery("Insert demo data", "insert into test.Source values('Test2',2)"); + SqlTask.ExecuteNonQuery("Insert demo data", "insert into test.Source values('Test3',3)"); + return sourceTableDefinition; + } + + internal static TableDefinition CreateDBLookupTable() { + TableDefinition sourceTableDefinition = new TableDefinition("test.Lookup", new List() { + new TableColumn("Key", "int", allowNulls: false), + new TableColumn("Col1", "nvarchar(100)", allowNulls: true) + }); + sourceTableDefinition.CreateTable(); + SqlTask.ExecuteNonQuery("Insert demo data", "insert into test.Lookup values(1, 'Lookup for 1')"); + SqlTask.ExecuteNonQuery("Insert demo data", "insert into test.Lookup values(2, 'Lookup for 2')"); + SqlTask.ExecuteNonQuery("Insert demo data", "insert into test.Lookup values(3, 'Lookup for 3')"); + return sourceTableDefinition; + } + + internal TableDefinition CreateDBDestinationTableForOutputRow() { + TableDefinition destinationTableDefinition = new TableDefinition("test.Destination", new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: true), + new TableColumn("Col2", "nvarchar(100)", allowNulls: true) + }); + destinationTableDefinition.CreateTable(); + return destinationTableDefinition; + } + + public class TransformationTestClass { + public int AddValue { get; set; } = 0; + + public List LookupData { get; set; } = new List(); + + public MyOutputDataRow TestTransformationFunc(MyInputDataRow myRow) { + MyOutputDataRow output = new MyOutputDataRow() { + Value1 = myRow.Value1, + LookupValue2 = LookupData.Where(ld => ld.Key == myRow.Value2).Select(ld=>ld.LookupValue).FirstOrDefault() + }; + return output; + } + } + } + +} diff --git a/ETLBoxTest/DataFlow/TestDataFlowMergeJoin.cs b/ETLBoxTest/DataFlow/TestDataFlowMergeJoin.cs new file mode 100644 index 00000000..d2131c1f --- /dev/null +++ b/ETLBoxTest/DataFlow/TestDataFlowMergeJoin.cs @@ -0,0 +1,103 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestDataFlowMergeJoin { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void ClassInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + 
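        // What DB_MergeJoin_DB below exercises: the MergeJoin component exposes two input targets, each
        // source is linked to join.Target1 / join.Target2, both sources are executed, and the join function
        // combines one row from the first input with one row from the second into a single output row
        // (here 1+10, 2+100 and 3+1000, which is what the Col2 in (11,102,1003) assertion checks). As
        // elsewhere in this patch, the angle-bracket generic arguments on MergeJoin appear to have been
        // stripped by this diff rendering.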
[TestInitialize] + public void TestInit() { + CleanUpSchemaTask.CleanUp("test"); + } + + public class MyDataRow1 { + public string Value1 { get; set; } + public int Value2 { get; set; } + } + + public class MyDataRow2 { + public int Value3 { get; set; } + } + + + /* + * 1-DBSource (out: MyDataRow1) + * 2-DBSource (out: MyDataRow2) + * 1,2-> Join (in: MyDataRow1, in: MyDataRow2, out: MyDataRow1) + * -> DBDestination (in: MyDataRow1) + */ + [TestMethod] + public void DB_MergeJoin_DB() { + TableDefinition source1TableDefinition = CreateTableForInput1("test.Source1"); + TableDefinition source2TableDefinition = CreateTableForInput2("test.Source2"); + TableDefinition destTableDefinition = CreateTableForDestination("test.Destination"); + + DBSource source1 = new DBSource() { SourceTableDefinition = source1TableDefinition }; + DBSource source2 = new DBSource() { SourceTableDefinition = source2TableDefinition }; + + MergeJoin join = new MergeJoin( + (input1, input2) => { + input1.Value2 += input2.Value3; + return input1; + }); + + DBDestination dest = new DBDestination() { DestinationTableDefinition = destTableDefinition }; + source1.LinkTo(join.Target1); + source2.LinkTo(join.Target2); + join.LinkTo(dest); + + source1.Execute(); + source2.Execute(); + dest.Wait(); + + Assert.AreEqual(3, RowCountTask.Count("test.Destination","Col2 in (11,102,1003)")); + + + } + + internal TableDefinition CreateTableForInput1(string tableName) { + TableDefinition def = new TableDefinition(tableName, new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: true), + new TableColumn("Col2", "int", allowNulls: true) + }); + def.CreateTable(); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test1',1)"); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test2',2)"); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test3',3)"); + + return def; + } + + internal TableDefinition CreateTableForInput2(string tableName) { + TableDefinition def = new TableDefinition(tableName, new List() { + new TableColumn("Col3", "int", allowNulls: false) + }); + def.CreateTable(); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values(10)"); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values(100)"); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values(1000)"); + return def; + } + + internal TableDefinition CreateTableForDestination(string tableName) { + TableDefinition def = new TableDefinition(tableName, new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: true), + new TableColumn("Col2", "int", allowNulls: true) + }); + def.CreateTable(); + return def; + } + } + +} diff --git a/ETLBoxTest/DataFlow/TestDataFlowMulticast.cs b/ETLBoxTest/DataFlow/TestDataFlowMulticast.cs new file mode 100644 index 00000000..ab9f9bc4 --- /dev/null +++ b/ETLBoxTest/DataFlow/TestDataFlowMulticast.cs @@ -0,0 +1,78 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestDataFlowMulticast { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void ClassInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + 
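            // ControlFlow.CurrentDbConnection acts as the ambient default connection: SqlTask, the
            // Create*/CleanUp* tasks and the DBSource/DBDestination components used in these tests all
            // fall back to it when no connection manager is handed to them explicitly, which is why every
            // ClassInitialize assigns it exactly once.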
ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestInitialize] + public void TestInit() { + CleanUpSchemaTask.CleanUp("test"); + } + + public class MyDataRow { + public string Value1 { get; set; } + public int Value2 { get; set; } + } + + /* + * DBSource (out: object) + * -> Multicast (in/out: object) + * 1-> DBDestination (in: object) 2-> DBDestination (in: object) + */ + [TestMethod] + public void DB_Multicast_DB() { + TableDefinition sourceTableDefinition = CreateTableForMyDataRow("test.Source"); + TableDefinition dest1TableDefinition = CreateTableForMyDataRow("test.Destination1"); + TableDefinition dest2TableDefinition = CreateTableForMyDataRow("test.Destination2"); + InsertDemoDataForMyRowTable("test.Source"); + + DBSource source = new DBSource(); + source.SourceTableDefinition = sourceTableDefinition; + Multicast multicast = new Multicast(); + DBDestination dest1 = new DBDestination(); + dest1.DestinationTableDefinition = dest1TableDefinition; + DBDestination dest2 = new DBDestination(); + dest2.DestinationTableDefinition = dest2TableDefinition; + + source.LinkTo(multicast); + multicast.LinkTo(dest1); + multicast.LinkTo(dest2); + source.Execute(); + dest1.Wait(); + dest2.Wait(); + + Assert.AreEqual(3, RowCountTask.Count("test.Source","Col2 in (1,2,3)")); + Assert.AreEqual(3, RowCountTask.Count("test.Destination1", "Col2 in (1,2,3)")); + Assert.AreEqual(3, RowCountTask.Count("test.Destination2", "Col2 in (1,2,3)")); + + } + + internal TableDefinition CreateTableForMyDataRow(string tableName) { + TableDefinition def = new TableDefinition(tableName, new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: true), + new TableColumn("Col2", "int", allowNulls: true) + }); + def.CreateTable(); + return def; + } + + private static void InsertDemoDataForMyRowTable(string tableName) { + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test1',1)"); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test2',2)"); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test3',3)"); + } + } + +} diff --git a/ETLBoxTest/DataFlow/TestDataFlowPredicates.cs b/ETLBoxTest/DataFlow/TestDataFlowPredicates.cs new file mode 100644 index 00000000..1a0b48df --- /dev/null +++ b/ETLBoxTest/DataFlow/TestDataFlowPredicates.cs @@ -0,0 +1,79 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System; +using System.Collections.Generic; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestDataFlowPredicates { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void ClassInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestInitialize] + public void TestInit() { + CleanUpSchemaTask.CleanUp("test"); + } + + public class MyDataRow { + public string Value1 { get; set; } + public int Value2 { get; set; } + } + + /* + * DBSource (out: object) + * -> Multicast (in/out: object with predicates) + * 1-> DBDestination (in: object) 2-> DBDestination 
(in: object) + */ + [TestMethod] + public void DB_MulticastWPredicates_DB() { + TableDefinition sourceTableDefinition = CreateTableForMyDataRow("test.Source"); + TableDefinition dest1TableDefinition = CreateTableForMyDataRow("test.Destination1"); + TableDefinition dest2TableDefinition = CreateTableForMyDataRow("test.Destination2"); + InsertDemoDataForMyRowTable("test.Source"); + + DBSource source = new DBSource(); + source.SourceTableDefinition = sourceTableDefinition; + Multicast multicast = new Multicast(); + DBDestination dest1 = new DBDestination(); + dest1.DestinationTableDefinition = dest1TableDefinition; + DBDestination dest2 = new DBDestination(); + dest2.DestinationTableDefinition = dest2TableDefinition; + + source.LinkTo(multicast); + multicast.LinkTo(dest1, row => row.Value2 <= 2); + multicast.LinkTo(dest2, row => row.Value2 > 2); + source.Execute(); + dest1.Wait(); + dest2.Wait(); + + Assert.AreEqual(3, RowCountTask.Count("test.Source","Col2 in (1,2,3)")); + Assert.AreEqual(2, RowCountTask.Count("test.Destination1", "Col2 in (1,2)")); + Assert.AreEqual(1, RowCountTask.Count("test.Destination2")); + + } + + internal TableDefinition CreateTableForMyDataRow(string tableName) { + TableDefinition def = new TableDefinition(tableName, new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: true), + new TableColumn("Col2", "int", allowNulls: true) + }); + def.CreateTable(); + return def; + } + + private static void InsertDemoDataForMyRowTable(string tableName) { + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test1',1)"); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test2',2)"); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test3',3)"); + } + } + +} diff --git a/ETLBoxTest/DataFlow/TestDataFlowSort.cs b/ETLBoxTest/DataFlow/TestDataFlowSort.cs new file mode 100644 index 00000000..4b380e33 --- /dev/null +++ b/ETLBoxTest/DataFlow/TestDataFlowSort.cs @@ -0,0 +1,82 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System; +using System.Collections.Generic; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestDataFlowBlockSort { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void ClassInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestInitialize] + public void TestInit() { + CleanUpSchemaTask.CleanUp("test"); + } + + public class MySimpleRow { + public string Value1 { get; set; } + public int Value2 { get; set; } + + + } + + + /* + * DSBSource (out: object) -> BlockTransformation (in/out: object) -> DBDestination (in: object) + */ + [TestMethod] + public void DB_BlockTrans_DB() { + TableDefinition sourceTableDefinition = CreateSourceTable("test.Source"); + TableDefinition destinationTableDefinition = CreateDestinationTable("test.Destination"); + + DBSource source = new DBSource(sourceTableDefinition); + DBDestination dest = new DBDestination(destinationTableDefinition); + Comparison comp = new Comparison( + (x, y) => y.Value2 - x.Value2 + ); + Sort block = new Sort(comp); + source.LinkTo(block); + 
block.LinkTo(dest); + source.Execute(); + dest.Wait(); + Assert.AreEqual(1, RowCountTask.Count("test.Destination", "[Key] = 1 and Col2 = 3")); + Assert.AreEqual(1, RowCountTask.Count("test.Destination", "[Key] = 2 and Col2 = 2")); + Assert.AreEqual(1, RowCountTask.Count("test.Destination", "[Key] = 3 and Col2 = 1")); + } + + private static TableDefinition CreateSourceTable(string tableName) { + TableDefinition sourceTableDefinition = new TableDefinition(tableName, new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: false), + new TableColumn("Col2", "int", allowNulls: true) + }); + sourceTableDefinition.CreateTable(); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test1',1)"); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test2',2)"); + SqlTask.ExecuteNonQuery("Insert demo data", $"insert into {tableName} values('Test3',3)"); + return sourceTableDefinition; + } + + private static TableDefinition CreateDestinationTable(string tableName) { + TableDefinition destinationTableDefinition = new TableDefinition(tableName, new List() { + new TableColumn("Key", "int", allowNulls: false, isIdentity:true, isPrimaryKey:true), + new TableColumn("Col1", "nvarchar(100)", allowNulls: false), + new TableColumn("Col2", "int", allowNulls: true) + }); + destinationTableDefinition.CreateTable(); + return destinationTableDefinition; + } + + + + } + +} diff --git a/ETLBoxTest/DataFlow/TestDataFlowTransformation.cs b/ETLBoxTest/DataFlow/TestDataFlowTransformation.cs new file mode 100644 index 00000000..c96c1f0e --- /dev/null +++ b/ETLBoxTest/DataFlow/TestDataFlowTransformation.cs @@ -0,0 +1,153 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestDataFlowTransformation { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void ClassInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestInitialize] + public void TestInit() { + CleanUpSchemaTask.CleanUp("test"); + } + + /* + * CSVSource (out: string[]) -> RowTransformation (in: string[], out: object)-> DBDestination (in: object) + */ + [TestMethod] + public void CSV_RowTrans_DB() { + TableDefinition destinationTableDefinition = new TableDefinition("test.Staging", new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: false), + new TableColumn("Col2", "int", allowNulls: true) + }); + destinationTableDefinition.CreateTable(); + + CSVSource source = new CSVSource("DataFlow/Simple_CSV2DB.csv"); + RowTransformation trans = new RowTransformation( + csvdata => { + return new MySimpleRow() { + Value1 = csvdata[0], + Value2 = int.Parse(csvdata[1]) + }; + }); + DBDestination dest = new DBDestination() { DestinationTableDefinition = destinationTableDefinition }; + source.LinkTo(trans); + trans.LinkTo(dest); + source.Execute(); + dest.Wait(); + Assert.AreEqual(3, SqlTask.ExecuteScalar("Check staging table", "select count(*) from test.Staging")); + } + + public class MySimpleRow { + public string Value1 { get; set; } + public int Value2 { get; set; } + 
} + + + /* + * DBSource (out: object) -> RowTransformation (in: object, out: object) -> DBDestination (in: object) + */ + [TestMethod] + public void DB_RowTrans_DB() { + TableDefinition sourceTableDefinition = CreateDBSourceTableForSimpleRow(); + TableDefinition destinationTableDefinition = CreateDBDestinationTableForSimpleRow(); + + DBSource source = new DBSource() { SourceTableDefinition = sourceTableDefinition }; + RowTransformation trans = new RowTransformation(myRow => { + myRow.Value2 += 1; + return myRow; + }); + DBDestination dest = new DBDestination() { DestinationTableDefinition = destinationTableDefinition }; + source.LinkTo(trans); + trans.LinkTo(dest); + source.Execute(); + dest.Wait(); + Assert.AreEqual(3, SqlTask.ExecuteScalar("Check destination table", "select count(*) from test.Destination")); + Assert.AreEqual(9, SqlTask.ExecuteScalar("Check destination table", "select sum(Col2) from test.Destination")); + } + + internal TableDefinition CreateDBSourceTableForSimpleRow() { + TableDefinition sourceTableDefinition = new TableDefinition("test.Source", new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: false), + new TableColumn("Col2", "int", allowNulls: true) + }); + sourceTableDefinition.CreateTable(); + SqlTask.ExecuteNonQuery("Insert demo data", "insert into test.Source values('Test1',1)"); + SqlTask.ExecuteNonQuery("Insert demo data", "insert into test.Source values('Test2',2)"); + SqlTask.ExecuteNonQuery("Insert demo data", "insert into test.Source values('Test3',3)"); + return sourceTableDefinition; + } + + internal TableDefinition CreateDBDestinationTableForSimpleRow() { + TableDefinition destinationTableDefinition = new TableDefinition("test.Destination", new List() { + new TableColumn("Col1", "nvarchar(100)", allowNulls: false), + new TableColumn("Col2", "int", allowNulls: true) + }); + destinationTableDefinition.CreateTable(); + return destinationTableDefinition; + } + + /* + * DBSource (out: object) -> RowTransformation (in: object, out: object) --> DBDestination (in: object) + */ + [TestMethod] + public void DB_RowTrans_DB_WithInitAction() { + TableDefinition sourceTableDefinition = CreateDBSourceTableForSimpleRow(); + TableDefinition destinationTableDefinition = CreateDBDestinationTableForSimpleRow(); + + RowTransformationTestClass testClass = new RowTransformationTestClass(); + DBSource source = new DBSource() { SourceTableDefinition = sourceTableDefinition }; + RowTransformation trans = new RowTransformation( + "RowTransformation testing init Action", + testClass.TestTransformationFunc, + testClass.SetAddValue + ); + DBDestination dest = new DBDestination() { DestinationTableDefinition = destinationTableDefinition }; + source.LinkTo(trans); + trans.LinkTo(dest); + source.Execute(); + dest.Wait(); + Assert.AreEqual(3, SqlTask.ExecuteScalar("Check destination table", "select count(*) from test.Destination")); + Assert.AreEqual(9, SqlTask.ExecuteScalar("Check destination table", "select sum(Col2) from test.Destination")); + } + + public class RowTransformationTestClass { + public int AddValue { get; set; } = 0; + public void SetAddValue() { + AddValue = 1; + } + + public MySimpleRow TestTransformationFunc(MySimpleRow myRow) { + myRow.Value2 += AddValue; + return myRow; + } + } + + [TestMethod] + public void TestLogging_DB_RowTrans_DB() { + CreateLogTablesTask.CreateLog(); + DB_RowTrans_DB(); + Assert.AreEqual(2, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='DF_DBSOURCE' group by TaskHash") { DisableLogging = true 
}.ExecuteScalar()); + Assert.AreEqual(2, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='DF_DBDEST' group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + } + + [TestMethod] + public void TestLogging_CSV_RowTrans_DB() { + CreateLogTablesTask.CreateLog(); + CSV_RowTrans_DB(); + Assert.AreEqual(2, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='DF_CSVSOURCE' group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + Assert.AreEqual(2, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='DF_DBDEST' group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + } + } + +} diff --git a/ETLBoxTest/Database/TestAddFileGroupTask.cs b/ETLBoxTest/Database/TestAddFileGroupTask.cs new file mode 100644 index 00000000..24809eba --- /dev/null +++ b/ETLBoxTest/Database/TestAddFileGroupTask.cs @@ -0,0 +1,39 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestAddFileGroupTask + { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + } + + [TestMethod] + public void TestAddFileGroup() { + string fgName = TestHelper.RandomString(10) + "_FG"; + Assert.AreEqual(0,SqlTask.ExecuteScalar("FileGroup", $"select count(*) from sys.filegroups where name = '{fgName}'")); + AddFileGroupTask.AddFileGroup(fgName, DBNameParameter, "2048KB", "2048KB", false); + Assert.AreEqual(1,SqlTask.ExecuteScalar("FileGroup", $"select count(*) from sys.filegroups where name = '{fgName}'")); + Assert.AreEqual(1,SqlTask.ExecuteScalar("FileGroup", $"select count(*) from sys.sysfiles where name = '{fgName}'")); + + } + + [TestMethod] + public void TestAddDefaultFileGroup() { + string fgName = TestHelper.RandomString(10) + "_FG"; + Assert.AreEqual(0,SqlTask.ExecuteScalar("FileGroup", $"select count(*) from sys.filegroups where name = '{fgName}' and is_default = 1")); + AddFileGroupTask.AddFileGroup(fgName, DBNameParameter, "5MB", "5MB", true); + Assert.AreEqual(1,SqlTask.ExecuteScalar("FileGroup", $"select count(*) from sys.filegroups where name = '{fgName}' and is_default = 1")); + Assert.AreEqual(1,SqlTask.ExecuteScalar("FileGroup", $"select count(*) from sys.sysfiles where name = '{fgName}'")); + + } + + } +} diff --git a/ETLBoxTest/Database/TestCRUDProcedureTask.cs b/ETLBoxTest/Database/TestCRUDProcedureTask.cs new file mode 100644 index 00000000..d1f44201 --- /dev/null +++ b/ETLBoxTest/Database/TestCRUDProcedureTask.cs @@ -0,0 +1,66 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestCRUDProcedureTask { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new 
SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestMethod] + public void TestCreateProcedure() { + CRUDProcedureTask.CreateOrAlter("test.Test1", "select 1 as Test"); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if proc exists", "select count(*) from sys.objects where type = 'P' and object_id = object_id('test.Test1')")); + } + + [TestMethod] + public void TestAlterProcedure() { + CRUDProcedureTask.CreateOrAlter("test.Test3", "select 1 as Test"); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if proc exists", "select count(*) from sys.objects where type = 'P' and object_id = object_id('test.Test3') and create_date = modify_date")); + CRUDProcedureTask.CreateOrAlter("test.Test3", "select 5 as Test"); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if proc exists", "select count(*) from sys.objects where type = 'P' and object_id = object_id('test.Test3') and create_date <> modify_date")); + } + + [TestMethod] + public void TestCreateProcedureWithParameter() { + List<ProcedureParameter> pars = new List<ProcedureParameter>() { + new ProcedureParameter("Par1", "varchar(10)"), + new ProcedureParameter("Par2", "int", "7"), + }; + CRUDProcedureTask.CreateOrAlter("test.Test2", "select 1 as Test", pars); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if proc exists", "select count(*) from sys.objects where type = 'P' and object_id = object_id('test.Test2')")); + Assert.AreEqual(2, SqlTask.ExecuteScalar("Check if parameter exists" + , "select count(*) from sys.parameters where object_id = object_id('test.Test2')")); + } + + [TestMethod] + public void TestCreateProcedureWithProcedureObject() { + List<ProcedureParameter> pars = new List<ProcedureParameter>() { + new ProcedureParameter("Par1", "varchar(10)"), + new ProcedureParameter("Par2", "int", "7"), + }; + ProcedureDefinition def = new ProcedureDefinition("test.Test2", "select 1 as Test", pars); + CRUDProcedureTask.CreateOrAlter(def); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if proc exists", "select count(*) from sys.objects where type = 'P' and object_id = object_id('test.Test2')")); + Assert.AreEqual(2, SqlTask.ExecuteScalar("Check if parameter exists" + , "select count(*) from sys.parameters where object_id = object_id('test.Test2')")); + } + + [TestMethod] + public void TestLogging() { + CreateLogTablesTask.CreateLog(); + CRUDProcedureTask.CreateOrAlter("test.Test4", "select 1 as Test"); + Assert.AreEqual(4, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='CRUDPROC' group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + } + + } +} diff --git a/ETLBoxTest/Database/TestCRUDViewTask.cs b/ETLBoxTest/Database/TestCRUDViewTask.cs new file mode 100644 index 00000000..b4cffe47 --- /dev/null +++ b/ETLBoxTest/Database/TestCRUDViewTask.cs @@ -0,0 +1,42 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestCRUDViewTask { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestMethod] + public void TestCreateView() { +
CRUDViewTask.CreateOrAlter("test.View1", "select 1 as Test"); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if view exists", "select count(*) from sys.objects where type = 'V' and object_id = object_id('test.View1')")); + } + + [TestMethod] + public void TestAlterView() { + CRUDViewTask.CreateOrAlter("test.View2", "select 1 as Test"); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if view exists", "select count(*) from sys.objects where type = 'V' and object_id = object_id('test.View2') and create_date = modify_date")); + CRUDViewTask.CreateOrAlter("test.View2", "select 5 as Test"); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if view exists", "select count(*) from sys.objects where type = 'V' and object_id = object_id('test.View2') and create_date <> modify_date")); + } + + [TestMethod] + public void TestLogging() { + CreateLogTablesTask.CreateLog(); + CRUDViewTask.CreateOrAlter("test.View3", "select 1 as Test"); + Assert.AreEqual(4, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='CRUDVIEW' group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + } + + + + } +} diff --git a/ETLBoxTest/Database/TestCalculateDatabaseHashTask.cs b/ETLBoxTest/Database/TestCalculateDatabaseHashTask.cs new file mode 100644 index 00000000..df7f2e09 --- /dev/null +++ b/ETLBoxTest/Database/TestCalculateDatabaseHashTask.cs @@ -0,0 +1,33 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestCalculateDatabaseTask { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestMethod] + public void TestHashCalculationForOneTable() { + List columns = new List() { new TableColumn("value", "int") }; + CreateTableTask.Create("test.Table1", columns); + + string hash = CalculateDatabaseHashTask.Calculate(new List() { "test" }); + string hashAgain = CalculateDatabaseHashTask.Calculate(new List() { "test" }); + + Assert.AreEqual(hash, hashAgain); + Assert.AreEqual("A35318F3AE62DD0BA0607BB24F2103CFE77661B3", hash); + + } + + } +} diff --git a/ETLBoxTest/Database/TestCleanUpSchemaTask.cs b/ETLBoxTest/Database/TestCleanUpSchemaTask.cs new file mode 100644 index 00000000..39c6209e --- /dev/null +++ b/ETLBoxTest/Database/TestCleanUpSchemaTask.cs @@ -0,0 +1,44 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestCleanUpSchemaTask + { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + } + + [TestMethod] + public void TestCleanUpSchema() + { + string schemaName = 
"s"+TestHelper.RandomString(9); + SqlTask.ExecuteNonQuery("Create schema", $"create schema {schemaName}"); + SqlTask.ExecuteNonQuery("Create schema", $"create table {schemaName}.Table1 ( Nothing int null )"); + SqlTask.ExecuteNonQuery("Create schema", $"create view {schemaName}.View1 as select * from {schemaName}.Table1"); + SqlTask.ExecuteNonQuery("Create schema", $"create procedure {schemaName}.Proc1 as select * from {schemaName}.Table1"); + var objCountSql = new SqlTask("Count object", $@"select count(*) from sys.objects obj + inner join sys.schemas sch on sch.schema_id = obj.schema_id +where sch.name = '{schemaName}'"); + Assert.AreEqual(3,objCountSql.ExecuteScalar()); + CleanUpSchemaTask.CleanUp(schemaName); + Assert.AreEqual(0,objCountSql.ExecuteScalar()); + } + + [TestMethod] + public void TestCleanupETLLogTables() { + CreateLogTablesTask.CreateLog(); + Assert.IsTrue(new SqlTask("Check etl.Log table", $"select count(*) from sys.tables where type = 'U' and name = 'Log' and schema_id('etl') = schema_id") { DisableLogging = true }.ExecuteScalarAsBool()); + Assert.IsTrue(new SqlTask("Check etl.LoadProcess table", $"select count(*) from sys.tables where type = 'U' and name = 'LoadProcess' and schema_id('etl') = schema_id") { DisableLogging = true }.ExecuteScalarAsBool()); + CleanUpSchemaTask.CleanUp("etl"); + Assert.IsFalse(new SqlTask("Check etl.Log table", $"select count(*) from sys.tables where type = 'U' and name = 'Log' and schema_id('etl') = schema_id") { DisableLogging = true }.ExecuteScalarAsBool()); + Assert.IsFalse(new SqlTask("Check etl.LoadProcess table", $"select count(*) from sys.tables where type = 'U' and name = 'LoadProcess' and schema_id('etl') = schema_id") { DisableLogging = true }.ExecuteScalarAsBool()); + } + } +} diff --git a/ETLBoxTest/Database/TestCreateDeleteDatabaseTasks.cs b/ETLBoxTest/Database/TestCreateDeleteDatabaseTasks.cs new file mode 100644 index 00000000..f9c1ac34 --- /dev/null +++ b/ETLBoxTest/Database/TestCreateDeleteDatabaseTasks.cs @@ -0,0 +1,50 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestCreateDeleteDatabaseTask + { + public TestContext TestContext { get; set; } + + [ClassInitialize] + public static void TestInit(TestContext testContext) + { + string connectionString = testContext.Properties["connectionString"].ToString(); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(connectionString).GetMasterConnection()); + } + + [TestMethod] + public void TestDropDB() { + string dbName = TestContext.Properties["dbName"].ToString(); + var sqlTask = new SqlTask("Get assert data", $"select cast(db_id('{dbName}') as int)"); + + CreateDatabaseTask.Create(dbName); + + Assert.IsTrue(sqlTask.ExecuteScalarAsBool()); + + DropDatabaseTask.Delete(dbName); + + Assert.IsFalse(sqlTask.ExecuteScalarAsBool()); + } + + + [TestMethod] + public void TestCreateWithAllParameters() + { + string dbName = TestContext.Properties["dbName"].ToString(); + var sqlTask = new SqlTask("Get assert data", $"select cast(db_id('{dbName}') as int)"); + + DropDatabaseTask.Delete(dbName); + + Assert.IsFalse(sqlTask.ExecuteScalarAsBool()); + + CreateDatabaseTask.Create(dbName, RecoveryModel.Simple, "Latin1_General_CS_AS"); + + Assert.IsTrue(sqlTask.ExecuteScalarAsBool()); + + } + + + } +} diff --git a/ETLBoxTest/Database/TestCreateIndexTask.cs b/ETLBoxTest/Database/TestCreateIndexTask.cs new file mode 100644 index 00000000..c7c769f6 --- /dev/null +++ 
b/ETLBoxTest/Database/TestCreateIndexTask.cs @@ -0,0 +1,52 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestCreateIndexTask { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + SqlTask.ExecuteNonQuery("Create test table", $@"create table test.Table1 ( key1 int null, key2 int not null)"); + SqlTask.ExecuteNonQuery("Create test table", $@"create table test.Table2 ( key1 int null, key2 int not null, value1 datetime null, value2 varchar(100) not null )"); + } + + [TestMethod] + public void TestCreateIndex() { + string indexName = "ix_" + TestHelper.RandomString(5); + CreateIndexTask.Create(indexName, "test.Table1",new List() { "key1", "key2" } ); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if index exists", $"select count(*) from sys.indexes where name = '{indexName}'")); + } + + [TestMethod] + public void TestReCreateIndex() { + string indexName = "ix_" + TestHelper.RandomString(5); + CreateIndexTask.Create(indexName, "test.Table1", new List() { "key1", "key2" }); + CreateIndexTask.Create(indexName, "test.Table1", new List() { "key1", "key2" }); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if index exists", $"select count(*) from sys.indexes where name = '{indexName}'")); + } + + [TestMethod] + public void TestCreateIndexWithInclude() { + string indexName = "ix_" + TestHelper.RandomString(5); + CreateIndexTask.Create(indexName, "test.Table2", new List() { "key1","key2" }, new List() { "value1", "value2"}); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if index exists", $"select count(*) from sys.indexes where name = '{indexName}'")); + } + + [TestMethod] + public void TestLogging() { + CreateLogTablesTask.CreateLog(); + CreateIndexTask.Create("ix_" + TestHelper.RandomString(5), "test.Table1", new List() { "key1", "key2" }); + Assert.AreEqual(2, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='CREATEINDEX' group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + } + + + } +} diff --git a/ETLBoxTest/Database/TestCreateSchemaTask.cs b/ETLBoxTest/Database/TestCreateSchemaTask.cs new file mode 100644 index 00000000..7572ffc5 --- /dev/null +++ b/ETLBoxTest/Database/TestCreateSchemaTask.cs @@ -0,0 +1,34 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestCreateSchemaTask { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + } + + [TestMethod] + public void TestCreateSchema() { + string schemaName = "s" + TestHelper.RandomString(9); + 
CreateSchemaTask.Create(schemaName); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if schema exists", $"select count(*) from sys.schemas where schema_name(schema_id) = '{schemaName}'")); + + } + + [TestMethod] + public void TestLogging() { + CreateLogTablesTask.CreateLog(); + CreateSchemaTask.Create("s" + TestHelper.RandomString(9)); + Assert.AreEqual(2, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='CREATESCHEMA' group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + } + + + } +} diff --git a/ETLBoxTest/Database/TestCreateTableTask.cs b/ETLBoxTest/Database/TestCreateTableTask.cs new file mode 100644 index 00000000..adeff225 --- /dev/null +++ b/ETLBoxTest/Database/TestCreateTableTask.cs @@ -0,0 +1,117 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; +using System.Linq; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestCreateTableTask { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestMethod] + public void TestCreateTable() { + List columns = new List() { new TableColumn("value", "int") }; + CreateTableTask.Create("test.Table1", columns); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if table exists", $"select count(*) from sys.objects where type = 'U' and object_id = object_id('test.Table1')")); + + } + + [TestMethod] + public void TestReCreateTable() { + List columns = new List() { new TableColumn("value", "int") }; + CreateTableTask.Create("test.Table1", columns); + CreateTableTask.Create("test.Table1", columns); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if table exists", $"select count(*) from sys.objects where type = 'U' and object_id = object_id('test.Table1')")); + + } + + [TestMethod] + public void TestCreateTableWithNullable() { + List columns = new List() { new TableColumn("value", "int"), new TableColumn("value2", "datetime", true) }; + CreateTableTask.Create("test.Table2", columns); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if table exists", $"select count(*) from sys.objects where type = 'U' and object_id = object_id('test.Table2')")); + + } + + [TestMethod] + public void TestCreateTableWithPrimaryKey() { + List columns = new List() { + new TableColumn("Key", "int",allowNulls:false,isPrimaryKey:true), + new TableColumn("value2", "datetime", allowNulls:true) + }; + CreateTableTask.Create("test.Table3", columns); + Assert.AreEqual(2,SqlTask.ExecuteScalar("Check if column exists", $"select count(*) from sys.columns where object_id = object_id('test.Table3')")); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if primary key exists", $"select count(*) from sys.key_constraints where parent_object_id = object_id('test.Table3')")); + Assert.AreEqual("pk_Table3_Key", SqlTask.ExecuteScalar("Check if primary key has correct naming", "select name from sys.key_constraints where parent_object_id = object_id('test.Table3')")); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if column is nullable", $"select case when is_nullable = 1 then 1 else 0 end from sys.columns 
where object_id = object_id('test.Table3') and name='value2'")); + + } + + [TestMethod] + public void TestCreateTableOnlyNVarChars() { + List<TableColumn> columns = new List<TableColumn>() { + new TableColumn("value1", "int",allowNulls:false), + new TableColumn("value2", "datetime", allowNulls:true) + }; + new CreateTableTask("test.Table4", columns.Cast<ITableColumn>().ToList()) { OnlyNVarCharColumns = true }.Execute(); + Assert.AreEqual(2, SqlTask.ExecuteScalar("Check if column exists", $"select count(*) from sys.columns where object_id = object_id('test.Table4')")); + Assert.AreEqual(2,SqlTask.ExecuteScalar("Check if columns are nvarchar", $@"select count(*) from sys.columns cols inner join sys.types t on t.system_type_id = cols.system_type_id where object_id = object_id('test.Table4') and t.name = 'nvarchar'")); + + + } + + [TestMethod] + public void TestCreateTableWithIdentity() { + List<TableColumn> columns = new List<TableColumn>() { + new TableColumn("value1", "int",allowNulls:false) { IsIdentity =true, IdentityIncrement = 1000, IdentitySeed = 50 } + }; + CreateTableTask.Create("test.Table5", columns); + Assert.AreEqual(1, SqlTask.ExecuteScalar("Check if column exists", $"select count(*) from sys.columns where object_id = object_id('test.Table5')")); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if column has identity" + , $@"select case when is_identity = 1 then 1 else 0 end from sys.columns cols inner join sys.types t on t.system_type_id = cols.system_type_id + where object_id = object_id('test.Table5') and cols.name = 'value1'")); + + + } + + [TestMethod] + public void TestCreateTableWithDefault() { + List<TableColumn> columns = new List<TableColumn>() { + new TableColumn("value1", "int",allowNulls:false) { DefaultValue = "0" }, + new TableColumn("value2", "nvarchar(10)",allowNulls:false) { DefaultValue = "Test" }, + new TableColumn("value3", "decimal",allowNulls:false) { DefaultConstraintName="TestConstraint", DefaultValue = "3.12" } + }; + CreateTableTask.Create("test.Table6", columns); + Assert.AreEqual(3, SqlTask.ExecuteScalar("Check if column exists", $"select count(*) from sys.columns where object_id = object_id('test.Table6')")); + } + + + [TestMethod] + public void TestCreateTableWithComputedColumn() { + List<TableColumn> columns = new List<TableColumn>() { + new TableColumn("value1", "int",allowNulls:false) , + new TableColumn("value2", "int",allowNulls:false) , + new TableColumn("compValue", "bigint",allowNulls:true) { ComputedColumn = "value1 * value2" } + }; + CreateTableTask.Create("test.Table7", columns); + Assert.AreEqual(3, SqlTask.ExecuteScalar("Check if column exists", $"select count(*) from sys.columns where object_id = object_id('test.Table7')")); + } + + [TestMethod] + public void TestLogging() { + CreateLogTablesTask.CreateLog(); + CreateTableTask.Create("test.Table8", new List<TableColumn>() { new TableColumn("value", "int") }); + Assert.AreEqual(2, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='CREATETABLE' group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + } + + } +} diff --git a/ETLBoxTest/Database/TestDropTableTask.cs b/ETLBoxTest/Database/TestDropTableTask.cs new file mode 100644 index 00000000..cdf162f1 --- /dev/null +++ b/ETLBoxTest/Database/TestDropTableTask.cs @@ -0,0 +1,38 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestDropTableTask { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string 
DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestMethod] + public void TestDropTable() { + List<TableColumn> columns = new List<TableColumn>() { new TableColumn("value", "int") }; + CreateTableTask.Create("test.Table1", columns); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if table exists", $"select count(*) from sys.objects where type = 'U' and object_id = object_id('test.Table1')")); + DropTableTask.Drop("test.Table1"); + Assert.IsFalse(SqlTask.ExecuteScalarAsBool("Check if table exists", $"select count(*) from sys.objects where type = 'U' and object_id = object_id('test.Table1')")); + + } + + [TestMethod] + public void TestLogging() { + CreateLogTablesTask.CreateLog(); + CreateTableTask.Create("test.Table8", new List<TableColumn>() { new TableColumn("value", "int") }); + DropTableTask.Drop("test.Table8"); + Assert.AreEqual(2, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='DROPTABLE' group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + } + + } +} diff --git a/ETLBoxTest/Database/TestGetDatabaseListTask.cs b/ETLBoxTest/Database/TestGetDatabaseListTask.cs new file mode 100644 index 00000000..587701c5 --- /dev/null +++ b/ETLBoxTest/Database/TestGetDatabaseListTask.cs @@ -0,0 +1,30 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; +using System.Linq; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestGetDatabaseListTask { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + } + + [TestMethod] + public void TestGetDatabaseList() { + List<string> allDatabases = GetDatabaseListTask.List(); + + Assert.IsTrue(allDatabases.Count > 1); + Assert.IsTrue(allDatabases.Any(name => name == DBNameParameter)); + + } + + } +} diff --git a/ETLBoxTest/Database/TestParallelProcessing.cs b/ETLBoxTest/Database/TestParallelProcessing.cs new file mode 100644 index 00000000..53c3ca6a --- /dev/null +++ b/ETLBoxTest/Database/TestParallelProcessing.cs @@ -0,0 +1,30 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.Collections.Generic; +using System.Threading.Tasks; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestParallelProcessing { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + + } + + [TestMethod] + public void TestSqLTaskInParallel() { + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(TestContext.Properties["connectionString"].ToString())); + List<int> array = new List<int>() { 1, 
2, 3, 4, 5, 6, 7, 8, 9, 10 }; + Parallel.ForEach(array, new ParallelOptions { MaxDegreeOfParallelism = 2 }, + curNr => SqlTask.ExecuteNonQuery($"Test statement {curNr}", $"select 1") + ); + + + } + } +} diff --git a/ETLBoxTest/Database/TestRowCountTask.cs b/ETLBoxTest/Database/TestRowCountTask.cs new file mode 100644 index 00000000..64f3b2c9 --- /dev/null +++ b/ETLBoxTest/Database/TestRowCountTask.cs @@ -0,0 +1,46 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestRowCountTask { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + SqlTask.ExecuteNonQuery("Create test data table",$@" +create table test.RC ( value int null ) +insert into test.RC select * from (values (1), (2), (3)) AS MyTable(v)"); + } + + [TestMethod] + public void TestCount() { + + Assert.AreEqual(3, RowCountTask.Count("test.RC")); + } + + [TestMethod] + public void TestCountWithCondition() { + Assert.AreEqual(1, RowCountTask.Count("test.RC", "value = 2")); + } + + [TestMethod] + public void TestCountWithQuickQueryMode() { + Assert.AreEqual(3, RowCountTask.Count("test.RC", RowCountOptions.QuickQueryMode)); + } + + [TestMethod] + public void TestLogging() { + CreateLogTablesTask.CreateLog(); + RowCountTask.Count("test.RC"); + Assert.AreEqual(2, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='ROWCOUNT' group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + RowCountTask.Count("test.RC", "value = 2"); + Assert.AreEqual(2, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='ROWCOUNT' and Message like '%with condition%' group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + } + } +} diff --git a/ETLBoxTest/Database/TestSqlTask.cs b/ETLBoxTest/Database/TestSqlTask.cs new file mode 100644 index 00000000..86b9c70e --- /dev/null +++ b/ETLBoxTest/Database/TestSqlTask.cs @@ -0,0 +1,109 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System; +using System.Collections.Generic; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestSqlTask { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + } + + [TestMethod] + public void TestExecuteNonQuery() { + string propName = TestHelper.RandomString(10); + SqlTask.ExecuteNonQuery("Test add extended property", $"exec sp_addextendedproperty @name = N'{propName}', @value = 'Test';"); + string asisCollation = SqlTask.ExecuteScalar("Get reference result", $"select value from fn_listextendedproperty('{propName}', default, default, default, default, default, default)").ToString(); + 
Assert.AreEqual("Test", asisCollation); + SqlTask.ExecuteNonQuery("Drop extended property", $"exec sp_dropextendedproperty @name = N'{propName}'"); + + } + + [TestMethod] + public void TestExecuteScalar() { + object result = SqlTask.ExecuteScalar("Test execute scalar", "select cast('Hallo Welt' as nvarchar(100)) as ScalarResult"); + Assert.AreEqual(result.ToString(), "Hallo Welt"); + + } + + [TestMethod] + public void TestExecuteScalarDatatype() { + decimal result = (decimal)(SqlTask.ExecuteScalar("Test execute scalar with datatype", "select cast(1.343 as numeric(4,3)) as ScalarResult")); + Assert.AreEqual(result, 1.343m); + + } + + [TestMethod] + public void TestExecuteScalarAsBool() { + bool result = SqlTask.ExecuteScalarAsBool("Test execute scalar as bool", "select 1 as Bool"); + Assert.IsTrue(result); + } + + [TestMethod] + public void TestExecuteReaderSingleColumn() { + List asIsResult = new List(); + List toBeResult = new List() { 1, 2, 3 }; + SqlTask.ExecuteReader("Test execute reader", "SELECT * FROM (VALUES (1),(2),(3)) MyTable(a)", + colA => asIsResult.Add((int)colA)); + CollectionAssert.AreEqual(asIsResult, toBeResult); + } + + public class ThreeInteger : IEquatable { + public int A { get; set; } + public int B { get; set; } + public int C { get; set; } + public ThreeInteger() { } + public ThreeInteger(int a, int b, int c) { + A = a; B = b; C = c; + } + public bool Equals(ThreeInteger other) => other != null ? other.A == A && other.B == B && other.C == C : false; + public override bool Equals(object obj) { + return this.Equals((ThreeInteger)obj); + } + public override int GetHashCode() { + return base.GetHashCode(); + } + } + + [TestMethod] + public void TestExecuteReaderMultiColumn() { + List asIsResult = new List(); + List toBeResult = new List() { new ThreeInteger(1, 2, 3), new ThreeInteger(4, 5, 6), new ThreeInteger(7, 8, 9) }; + ThreeInteger CurColumn = new ThreeInteger(); + SqlTask.ExecuteReader("Test execute reader", "SELECT * FROM (VALUES (1, 2, 3), (4, 5, 6), (7, 8, 9)) AS MyTable(a,b,c)" + , () => CurColumn = new ThreeInteger() + , () => asIsResult.Add(CurColumn) + , colA => CurColumn.A = (int)colA + , colB => CurColumn.B = (int)colB + , colC => CurColumn.C = (int)colC + ); + CollectionAssert.AreEqual(asIsResult, toBeResult); + } + + [TestMethod] + public void TestLogging() { + RemoveLogTablesTask.Remove(); + CreateLogTablesTask.CreateLog(); + SqlTask.ExecuteNonQuery("Test select", $"select 1 as test"); + Assert.AreEqual(2, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='SQL' group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + } + + [TestMethod] + public void TestLoggingWithoutCFConnection() { + ControlFlow.CurrentDbConnection = null; + var connection = new SqlConnectionManager(new ConnectionString(TestContext.Properties["connectionString"].ToString())); + new RemoveLogTablesTask() { ConnectionManager = connection }.Execute(); + new CreateLogTablesTask() { ConnectionManager = connection }.Execute(); + new SqlTask("Test select", $"select 1 as test") { ConnectionManager = connection }.ExecuteNonQuery(); + Assert.AreEqual(2, new SqlTask("Find log entry", "select count(*) from etl.Log where TaskType='SQL' group by TaskHash") { DisableLogging = true, ConnectionManager = connection }.ExecuteScalar()); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(ConnectionStringParameter); + } + } +} diff --git a/ETLBoxTest/Database/TestTruncateTable.cs b/ETLBoxTest/Database/TestTruncateTable.cs new file mode 100644 index 
00000000..7175b017 --- /dev/null +++ b/ETLBoxTest/Database/TestTruncateTable.cs @@ -0,0 +1,37 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestTruncateTable { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateSchemaTask.Create("test"); + SqlTask.ExecuteNonQuery("Create test data table",$@" +if object_id('dbo.TRC') is not null drop table dbo.TRC +create table dbo.TRC ( value int null ) +insert into dbo.TRC select * from (values (1), (2), (3)) AS MyTable(v)"); + } + + [TestMethod] + public void TestTruncate() { + Assert.AreEqual(3, RowCountTask.Count("dbo.TRC")); + TruncateTableTask.Truncate("dbo.TRC"); + Assert.AreEqual(0, RowCountTask.Count("dbo.TRC")); + } + + [TestMethod] + public void TestLogging() { + CreateLogTablesTask.CreateLog(); + TruncateTableTask.Truncate("dbo.TRC"); + Assert.IsTrue(new SqlTask("Find log entry", "select count(*) from etl.Log where TaskAction = 'START' and TaskType='TRUNCATE'") { DisableLogging = true }.ExecuteScalarAsBool()); + } + + } +} diff --git a/ETLBoxTest/ETLBoxTest.csproj b/ETLBoxTest/ETLBoxTest.csproj new file mode 100644 index 00000000..94bd2aae --- /dev/null +++ b/ETLBoxTest/ETLBoxTest.csproj @@ -0,0 +1,143 @@ + + + + Debug + AnyCPU + {E18C5029-9FA4-4846-828C-1B42D5D18C3C} + Library + Properties + ALE.ETLBoxTest + ETLBoxTest + v4.6.1 + 512 + {3AC096D0-A1C2-E12C-1390-A8335801FDAB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC} + 10.0 + $(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion) + $(ProgramFiles)\Common Files\microsoft shared\VSTT\$(VisualStudioVersion)\UITestExtensionPackages + False + UnitTest + + + + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + + + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + + ..\packages\Newtonsoft.Json.11.0.1\lib\net45\Newtonsoft.Json.dll + + + + + + + + + + + + + False + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + {0c7548ba-a597-488a-8add-2f628be691d6} + ETLBox + + + + + + PreserveNewest + + + PreserveNewest + + + + + + + + + False + + + False + + + False + + + False + + + + + + + + \ No newline at end of file diff --git a/ETLBoxTest/Helper/BigDataHelper.cs b/ETLBoxTest/Helper/BigDataHelper.cs new file mode 100644 index 00000000..80eb0495 --- /dev/null +++ b/ETLBoxTest/Helper/BigDataHelper.cs @@ -0,0 +1,26 @@ +using ALE.ETLBox; +using System; +using System.IO; +using System.Linq; + +namespace ALE.ETLBoxTest { + public class BigDataHelper { + public string FileName { get; set; } + public TableDefinition TableDefinition { get; set; } + public int NumberOfRows { get; set; } + public void CreateBigDataCSV() { + using (FileStream stream = File.Open(FileName,FileMode.Create)) + using (StreamWriter writer = new StreamWriter(stream)) { + string header = String.Join(",", TableDefinition.Columns.Select(col => col.Name)); + writer.WriteLine(header); + for (int i = 0; i < NumberOfRows; i++) { + string line = String.Join(",",TableDefinition.Columns.Select(col => { + int length = 
DataTypeConverter.GetStringLengthFromCharString(col.DataType); + return TestHelper.RandomString(length); + })); + writer.WriteLine(line); + } + } + } + } +} diff --git a/ETLBoxTest/Helper/TestHelper.cs b/ETLBoxTest/Helper/TestHelper.cs new file mode 100644 index 00000000..bf10f2d3 --- /dev/null +++ b/ETLBoxTest/Helper/TestHelper.cs @@ -0,0 +1,65 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System; +using System.Linq; + +namespace ALE.ETLBoxTest { + public static class TestHelper + { + internal static string RandomString(int length) + { + var random = new Random(); + const string pool = "abcdefghijklmnopqrstuvwxyz0123456789"; + var chars = Enumerable.Range(0, length) + .Select(x => pool[random.Next(0, pool.Length)]); + return new string(chars.ToArray()); + } + + internal static void RecreateDatabase(TestContext testContext) { + string dbName = testContext.Properties["dbName"].ToString(); + var connectionString = new ConnectionString(testContext.Properties["connectionString"].ToString()); + var masterConnection = new SqlConnectionManager(connectionString.GetMasterConnection()); + ControlFlow.SetLoggingDatabase(masterConnection); + new DropDatabaseTask(dbName) { DisableLogging =true, ConnectionManager = masterConnection }.Execute(); + new CreateDatabaseTask(dbName, RecoveryModel.Simple, "Latin1_General_CS_AS") { DisableLogging=true, ConnectionManager = masterConnection }.Execute(); + ControlFlow.SetLoggingDatabase(new SqlConnectionManager(connectionString)); + + } + internal static string CreateCubeXMLA(string dbName) { + return $@" + + Cube + + + + {dbName} + {dbName} + + + ImpersonateCurrentUser + + + +"; + } + + internal static string DeleteCubeXMLA(string dbName) { + return $@" + + {dbName} + +"; + } + + internal static void RecreateCube(TestContext testContext) { + string dbName = testContext.Properties["dbName"].ToString(); + ControlFlow.CurrentAdomdConnection = new AdomdConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString()).GetConnectionWithoutCatalog()); + try { + XmlaTask.ExecuteNonQuery("Drop cube", DeleteCubeXMLA(dbName)); + } + catch { } + XmlaTask.ExecuteNonQuery("Create cube", CreateCubeXMLA(dbName)); + } + + } +} diff --git a/ETLBoxTest/Logging/TestETLLogWithDemo.cs b/ETLBoxTest/Logging/TestETLLogWithDemo.cs new file mode 100644 index 00000000..e7900f75 --- /dev/null +++ b/ETLBoxTest/Logging/TestETLLogWithDemo.cs @@ -0,0 +1,78 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System.IO; +using System.Text.RegularExpressions; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestGetLogAsJSONTask { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + ControlFlow.ClearSettings(); + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateLogTablesTask.CreateLog(); + } + + + [TestMethod] + public void TestGetDemoLogAsJSON() { + RunDemoProcess(); + string jsonresult = GetLogAsJSONTask.GetJSON(); + jsonresult = RemoveHashes(RemoveDates(jsonresult.ToLower().Trim())); + string expectedresult = RemoveHashes(RemoveDates(File.ReadAllText("Logging/demolog_tobe.json").ToLower().Trim())); + 
Assert.AreEqual(expectedresult, jsonresult); + } + + private static string RemoveHashes(string jsonresult) => Regex.Replace(jsonresult, @"""taskhash"": ""[A-Za-z0-9]*""", @"""taskHash"": """""); + private static string RemoveDates(string jsonresult) => Regex.Replace(jsonresult, @"[0-9]+-[0-9]+-[0-9]([T]|\w)+[0-9]+:[0-9]+:[0-9]+[.][0-9]+", ""); + + private void RunDemoProcess() { + new Sequence("Test sequence 1", RunSubSequence) { TaskType = "SUBPACKAGE" }.Execute(); + SqlTask.ExecuteNonQuery($"Sql #1", "Select 1 as test"); + LogTask.Info("Info message"); + } + + public void RunSubSequence() { + Sequence.Execute("Test sub sequence 1.1", () => { + SqlTask.ExecuteNonQuery($"Sql #2", "Select 1 as test"); + SqlTask.ExecuteNonQuery($"Sql #3", "Select 1 as test"); + LogTask.Warn("Warn message #1"); + }); + Sequence.Execute("Test sub sequence 1.2", () => { + SqlTask.ExecuteNonQuery($"Sql #4", "Select 1 as test"); + }); + Sequence.Execute("Test sub sequence 1.3", + () => { + Sequence.Execute("Test sub sequence 2.1", () => { + Sequence.Execute("Test sub sequence 3.1", () => { + SqlTask.ExecuteNonQuery($"Sql #5", "Select 1 as test"); + SqlTask.ExecuteNonQuery($"Sql #6", "Select 1 as test"); + LogTask.Warn("Warn message #2"); + }); + CustomTask.Execute($"Custom #1", () => {; }); + SqlTask.ExecuteNonQuery($"Sql #7", "Select 1 as test"); + + }); + Sequence.Execute("Test sub sequence 2.2", () => { + CustomTask.Execute($"Custom #2", () => {; }); + SqlTask.ExecuteNonQuery($"Sql #7", "Select 1 as test"); + }); + Sequence.Execute("Test sub sequence 2.3", () => { + SqlTask.ExecuteNonQuery($"Sql #8", "Select 1 as test"); + CustomTask.Execute($"Custom #2", () => {; }); + Sequence.Execute("Test sub sequence 3.3", () => { + SqlTask.ExecuteNonQuery($"Sql #9", "Select 1 as test"); + SqlTask.ExecuteNonQuery($"Sql #10", "Select 1 as test"); + LogTask.Error("Error message"); + }); + }); + }); + CustomTask.Execute($"Custom #3", () => {; }); + } + } +} diff --git a/ETLBoxTest/Logging/TestGetLoadProcessAsJSONTask.cs b/ETLBoxTest/Logging/TestGetLoadProcessAsJSONTask.cs new file mode 100644 index 00000000..afe6c2ac --- /dev/null +++ b/ETLBoxTest/Logging/TestGetLoadProcessAsJSONTask.cs @@ -0,0 +1,45 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Newtonsoft.Json.Linq; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestGetLoadProcessAsJSONTask { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + TestHelper.RecreateDatabase(testContext); + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + CreateLogTablesTask.CreateLog(); + } + + + [TestMethod] + public void TestGet1LoadProcessAsJSON() { + RunProcess1(); + string response = GetLoadProcessAsJSONTask.GetJSON(); + JArray json = JArray.Parse(response); + + Assert.AreEqual("Process 1", (string)json[0]["processName"]); + Assert.AreEqual(false, (bool)json[0]["isRunning"]); + Assert.AreEqual(true, (bool)json[0]["wasSuccessful"]); + Assert.AreEqual(false, (bool)json[0]["wasAborted"]); + Assert.AreEqual(true, (bool)json[0]["isFinished"]); + Assert.AreEqual(false, (bool)json[0]["isTransferCompleted"]); + Assert.AreEqual("Start", (string)json[0]["startMessage"]); + Assert.AreEqual("End", 
(string)json[0]["endMessage"]); + } + + + private void RunProcess1() { + StartLoadProcessTask.Start("Process 1","Start"); + SqlTask.ExecuteNonQuery($"Just some sql", "Select 1 as test"); + EndLoadProcessTask.End("End"); + } + + + } +} diff --git a/ETLBoxTest/Logging/TestLoadProcessLog.cs b/ETLBoxTest/Logging/TestLoadProcessLog.cs new file mode 100644 index 00000000..76a6d801 --- /dev/null +++ b/ETLBoxTest/Logging/TestLoadProcessLog.cs @@ -0,0 +1,154 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using System; +using System.Threading.Tasks; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestLoadProcessLog { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + ControlFlow.STAGE = "SETUP"; + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + } + + [TestInitialize] + public void BeforeTests() { + TestHelper.RecreateDatabase(TestContext); + CreateLogTablesTask.CreateLog(); + Assert.IsTrue(new SqlTask("Check proc StartLoadProcess", $"select count(*) from sys.procedures where type = 'P' and name = 'StartLoadProcess' and schema_id = schema_id('etl')") { DisableLogging = true }.ExecuteScalarAsBool()); + Assert.IsTrue(new SqlTask("Check proc EndLoadProcess", $"select count(*) from sys.procedures where type = 'P' and name = 'EndLoadProcess' and schema_id = schema_id('etl')") { DisableLogging = true }.ExecuteScalarAsBool()); + Assert.IsTrue(new SqlTask("Check proc AbortLoadProcess", $"select count(*) from sys.procedures where type = 'P' and name = 'AbortLoadProcess' and schema_id = schema_id('etl')") { DisableLogging = true }.ExecuteScalarAsBool()); + } + + [TestMethod] + public void TestStartLoadProcessTask() { + DateTime beforeTask = DateTime.Now; + Task.Delay(10).Wait(); //Sql Server datetime is not that exact + + StartLoadProcessTask.Start("Test process 1"); + + DateTime afterTask = DateTime.Now; + Assert.IsNotNull(ControlFlow.CurrentLoadProcess); + Assert.AreEqual("Test process 1", ControlFlow.CurrentLoadProcess.ProcessName); + Assert.IsTrue(ControlFlow.CurrentLoadProcess.StartDate <= afterTask && ControlFlow.CurrentLoadProcess.StartDate >= beforeTask); + Assert.IsFalse(new SqlTask("Check if logging was disabled for start process task", "select count(*) from etl.Log") { DisableLogging = true }.ExecuteScalarAsBool()); + Assert.AreEqual(1, new SqlTask("Check if load process messages are correct", $"select count(*) from etl.LoadProcess where StartMessage is null and EndMessage is null and AbortMessage is null") { DisableLogging = true }.ExecuteScalar()); + Assert.AreEqual(1, new SqlTask("Check if load process entry is correct", $"select count(*) from etl.LoadProcess where IsRunning=1 and WasSuccessful=0 and WasAborted=0") { DisableLogging = true }.ExecuteScalar()); + + } + + [TestMethod] + public void TestStartLoadProcessTaskWithMessage() { + StartLoadProcessTask.Start("Test process 1", "Message 1", "SourceA"); + Assert.AreEqual(1, new SqlTask("Check if load process messages are correct", $"select count(*) from etl.LoadProcess where StartMessage = 'Message 1' and Source='SourceA' and EndMessage is null and AbortMessage is null") { DisableLogging = true }.ExecuteScalar()); + } + + [TestMethod] + public void 
TestEndLoadProcessTask() { + StartLoadProcessTask.Start("Test process 2"); + Assert.IsTrue(ControlFlow.CurrentLoadProcess.IsRunning == true); + DateTime beforeTask = DateTime.Now; + Task.Delay(10).Wait(); //Sql Server datetime is not that exact + + EndLoadProcessTask.End(); + + DateTime afterTask = DateTime.Now; + Assert.IsTrue(ControlFlow.CurrentLoadProcess.IsRunning == false); + Assert.IsTrue(ControlFlow.CurrentLoadProcess.WasSuccessful == true); + Assert.IsTrue(ControlFlow.CurrentLoadProcess.IsFinished == true); + Assert.IsTrue(ControlFlow.CurrentLoadProcess.EndDate <= afterTask && ControlFlow.CurrentLoadProcess.EndDate >= beforeTask); + Assert.IsFalse(new SqlTask("Check if logging was disabled for end process task", "select count(*) from etl.Log") { DisableLogging = true }.ExecuteScalarAsBool()); + Assert.AreEqual(1, new SqlTask("Check if load process entry is correct", $"select count(*) from etl.LoadProcess where IsRunning=0 and WasSuccessful=1 and WasAborted=0") { DisableLogging = true }.ExecuteScalar()); + Assert.AreEqual(1, new SqlTask("Check if load process messages are correct", $"select count(*) from etl.LoadProcess where StartMessage is null and EndMessage is null and AbortMessage is null") { DisableLogging = true }.ExecuteScalar()); + } + + [TestMethod] + public void TestAbortLoadProcessTask() { + StartLoadProcessTask.Start("Test process 3"); + Assert.IsTrue(ControlFlow.CurrentLoadProcess.IsRunning == true); + DateTime beforeTask = DateTime.Now; + Task.Delay(10).Wait(); //Sql Server datetime is not that exact + + AbortLoadProcessTask.Abort(ControlFlow.CurrentLoadProcess.LoadProcessKey, "AbortMessage"); + + DateTime afterTask = DateTime.Now; + Assert.IsTrue(ControlFlow.CurrentLoadProcess.IsRunning == false); + Assert.IsTrue(ControlFlow.CurrentLoadProcess.WasAborted == true); + Assert.IsTrue(ControlFlow.CurrentLoadProcess.EndDate <= afterTask && ControlFlow.CurrentLoadProcess.EndDate >= beforeTask); + Assert.IsTrue(ControlFlow.CurrentLoadProcess.AbortMessage == "AbortMessage"); + Assert.IsFalse(new SqlTask("Check if logging was disabled for end process task", "select count(*) from etl.Log") { DisableLogging = true }.ExecuteScalarAsBool()); + Assert.AreEqual(1, new SqlTask("Check if load process entry is correct", $"select count(*) from etl.LoadProcess where IsRunning=0 and WasSuccessful=0 and WasAborted=1") { DisableLogging = true }.ExecuteScalar()); + + } + + [TestMethod] + public void TestIsTransferCompletedForLoadProcessTask() { + StartLoadProcessTask.Start("Test process 4"); + Assert.IsTrue(ControlFlow.CurrentLoadProcess.IsRunning == true); + DateTime beforeTask = DateTime.Now; + Task.Delay(10).Wait(); //Sql Server datetime is not that exact + + TransferCompletedForLoadProcessTask.Complete(ControlFlow.CurrentLoadProcess.LoadProcessKey); + Assert.AreEqual(2, new SqlTask("Check if transfer completed was in log", "select count(*) from etl.Log where TaskType='TRANSFERCOMPLETE'") { DisableLogging = true }.ExecuteScalar()); + DateTime afterTask = DateTime.Now; + Assert.IsTrue(ControlFlow.CurrentLoadProcess.IsRunning == true); + Assert.IsTrue(ControlFlow.CurrentLoadProcess.TransferCompletedDate <= afterTask && ControlFlow.CurrentLoadProcess.TransferCompletedDate >= beforeTask); + } + + [TestMethod] + public void TestLoadProcessKeyInLog() { + StartLoadProcessTask.Start("Test process 5"); + SqlTask.ExecuteNonQuery("Test Task", "Select 1 as test"); + Assert.AreEqual(2, new SqlTask("Check if load process key is set", $"select count(*) from etl.Log where Message='Test Task' and 
LoadProcessKey = {ControlFlow.CurrentLoadProcess.LoadProcessKey}") { DisableLogging = true }.ExecuteScalar()); + } + + [TestMethod] + public void TestReadLastSuccessfulProcess() { + StartLoadProcessTask.Start("Test process 8"); + Task.Delay(10).Wait(); //Sql Server datetime is not that exact + EndLoadProcessTask.End(); + Task.Delay(10).Wait(); + StartLoadProcessTask.Start("Test process 9"); + Task.Delay(10).Wait(); //Sql Server datetime is not that exact + EndLoadProcessTask.End(); + + var lp = ReadLoadProcessTableTask.ReadWithOption(ReadOptions.ReadLastSuccessful); + Assert.IsTrue(lp.IsFinished); + Assert.IsTrue(lp.WasSuccessful); + Assert.IsFalse(lp.WasAborted); + Assert.AreEqual("Test process 9", lp.ProcessName); + Assert.AreEqual(2, new SqlTask("Check if finished processes exists", $"select count(*) from etl.LoadProcess where IsFinished=1") { DisableLogging = true }.ExecuteScalar()); + Assert.AreEqual(2, new SqlTask("Check if successful processes exists", $"select count(*) from etl.LoadProcess where WasSuccessful=1") { DisableLogging = true }.ExecuteScalar()); + } + + [TestMethod] + public void TestReadLastAbortedProcess() { + StartLoadProcessTask.Start("Test process 10"); + Task.Delay(10).Wait(); //Sql Server datetime is not that exact + EndLoadProcessTask.End(); + Task.Delay(10).Wait(); + StartLoadProcessTask.Start("Test process 11"); + Task.Delay(10).Wait(); //Sql Server datetime is not that exact + AbortLoadProcessTask.Abort(); + StartLoadProcessTask.Start("Test process 12"); + Task.Delay(10).Wait(); //Sql Server datetime is not that exact + EndLoadProcessTask.End(); + + var lp = ReadLoadProcessTableTask.ReadWithOption(ReadOptions.ReadLastAborted); + Assert.IsTrue(lp.IsFinished); + Assert.IsTrue(lp.WasAborted); + Assert.IsFalse(lp.WasSuccessful); + Assert.AreEqual("Test process 11", lp.ProcessName); + Assert.AreEqual(3, new SqlTask("Check if finished processes exists", $"select count(*) from etl.LoadProcess where IsFinished=1") { DisableLogging = true }.ExecuteScalar()); + Assert.AreEqual(2, new SqlTask("Check if successful processes exists", $"select count(*) from etl.LoadProcess where WasSuccessful=1") { DisableLogging = true }.ExecuteScalar()); + Assert.AreEqual(1, new SqlTask("Check if aborted processes exists", $"select count(*) from etl.LoadProcess where WasAborted=1") { DisableLogging = true }.ExecuteScalar()); + } + + } + +} diff --git a/ETLBoxTest/Logging/TestLogging.cs b/ETLBoxTest/Logging/TestLogging.cs new file mode 100644 index 00000000..2cc3075e --- /dev/null +++ b/ETLBoxTest/Logging/TestLogging.cs @@ -0,0 +1,88 @@ +using ALE.ETLBox; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace ALE.ETLBoxTest { + [TestClass] + public class TestLogging { + public TestContext TestContext { get; set; } + public string ConnectionStringParameter => TestContext?.Properties["connectionString"].ToString(); + public string DBNameParameter => TestContext?.Properties["dbName"].ToString(); + + [ClassInitialize] + public static void TestInit(TestContext testContext) { + ControlFlow.STAGE = "SETUP"; + ControlFlow.CurrentDbConnection = new SqlConnectionManager(new ConnectionString(testContext.Properties["connectionString"].ToString())); + } + + [TestInitialize] + public void BeforeTests() { + TestHelper.RecreateDatabase(TestContext); + CreateLogTablesTask.CreateLog(); + Assert.IsFalse(new SqlTask("Check if logging was disabled for table creation", "select count(*) from etl.Log") { DisableLogging = true }.ExecuteScalarAsBool()); + Assert.IsTrue(new SqlTask("Check etl.Log table", 
$"select count(*) from sys.tables where type = 'U' and name = 'Log' and schema_id('etl') = schema_id") { DisableLogging = true }.ExecuteScalarAsBool()); + Assert.IsTrue(new SqlTask("Check etl.LoadProcess table", $"select count(*) from sys.tables where type = 'U' and name = 'LoadProcess' and schema_id('etl') = schema_id") { DisableLogging = true }.ExecuteScalarAsBool()); + } + + + [TestMethod] + public void TestErrorLogging() { + LogTask.Error("Error"); + LogTask.Warn("Warn"); + LogTask.Info("Info"); + Assert.AreEqual(3, SqlTask.ExecuteScalar("Check if default log works", "select count(*) from etl.Log where Message in ('Error','Warn','Info')")); + } + + [TestMethod] + public void TestRemoveLogTablesTask() { + RemoveLogTablesTask.Remove(); + CreateLogTablesTask.CreateLog(); + RemoveLogTablesTask.Remove(); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if tables are deleted", "select case when object_id('etl.LoadProcess') is null then 1 else 0 end")); + Assert.IsTrue(SqlTask.ExecuteScalarAsBool("Check if tables are deleted", "select case when object_id('etl.Log') is null then 1 else 0 end")); + } + + [TestMethod] + public void TestLoadProcessKeyIfRestarted() { + StartLoadProcessTask.Start("Test process 6"); + int? processKey1 = ControlFlow.CurrentLoadProcess.LoadProcessKey; + SqlTask.ExecuteNonQuery("Test Task", "Select 1 as test"); + Assert.AreEqual(2, new SqlTask("Check if load process key is set", $"select count(*) from etl.Log where Message='Test Task' and LoadProcessKey = {processKey1}") { DisableLogging = true }.ExecuteScalar()); + + StartLoadProcessTask.Start("Test process 7"); + int? processKey2 = ControlFlow.CurrentLoadProcess.LoadProcessKey; + Assert.AreNotEqual(processKey1, processKey2); + SqlTask.ExecuteNonQuery("Test Task", "Select 1 as test"); + Assert.AreEqual(2, new SqlTask("Check if load process key is set", $"select count(*) from etl.Log where Message='Test Task' and LoadProcessKey = {processKey2}") { DisableLogging = true }.ExecuteScalar()); + } + + [TestMethod] + public void TestLogCleanup() { + LogTask.Error("Error"); + LogTask.Warn("Warn"); + LogTask.Info("Info"); + CleanUpLogTask.CleanUp(0); + Assert.AreEqual(0, new SqlTask("Check if log table is empty", $"select count(*) from etl.Log ") { DisableLogging = true }.ExecuteScalar()); + } + + [TestMethod] + public void TestLoadProcessKeyForLogTask() { + StartLoadProcessTask.Start("Test process 8"); + int? 
processKey1 = ControlFlow.CurrentLoadProcess.LoadProcessKey; + LogTask.Error("Test1"); + LogTask.Warn("Test2"); + LogTask.Info("Test3"); + Assert.AreEqual(3, new SqlTask("Check if load process key is set", $"select count(*) from etl.Log where Message like 'Test%' and LoadProcessKey = {processKey1}") { DisableLogging = true }.ExecuteScalar()); + } + + [TestMethod] + public void TestHashValuesAreEqualFor() { + SqlTask.ExecuteNonQuery("Test Task - same name", "Select 1 as test"); + SqlTask.ExecuteNonQuery("Test Task - same name", "Select 2 as test"); + Assert.AreEqual(4, new SqlTask("Check if hash are equal", $@"select count(*) from etl.Log group by TaskHash") { DisableLogging = true }.ExecuteScalar()); + + } + + + } + +} diff --git a/ETLBoxTest/Logging/demolog_tobe.json b/ETLBoxTest/Logging/demolog_tobe.json new file mode 100644 index 00000000..e0c4b43e --- /dev/null +++ b/ETLBoxTest/Logging/demolog_tobe.json @@ -0,0 +1,405 @@ +{ + "children": [ + { + "children": [ + { + "children": [ + { + "children": [], + "logKey": 3, + "logDate": "2018-07-31T15:20:02.207", + "startDate": "2018-07-31T15:20:02.207", + "endDate": "2018-07-31T15:20:02.207", + "level": "Info", + "message": "Sql #2", + "taskType": "SQL", + "taskAction": "START", + "taskHash": "E3D45475E9E7F43662554F3E5E2E300095A26CE3", + "stage": "", + "source": "ETL" + }, + { + "children": [], + "logKey": 5, + "logDate": "2018-07-31T15:20:02.207", + "startDate": "2018-07-31T15:20:02.207", + "endDate": "2018-07-31T15:20:02.207", + "level": "Info", + "message": "Sql #3", + "taskType": "SQL", + "taskAction": "START", + "taskHash": "B21CCA48E53F2FB65D5E058D252C005A1C3F0308", + "stage": "", + "source": "ETL" + }, + { + "children": [], + "logKey": 7, + "logDate": "2018-07-31T15:20:02.207", + "startDate": "2018-07-31T15:20:02.207", + "level": "Warn", + "message": "Warn message #1", + "taskType": "LOG", + "taskAction": "LOG", + "taskHash": "ACDE4980380E776A71BFE138405717B7905F78E1", + "stage": "", + "source": "ETL" + } + ], + "logKey": 2, + "logDate": "2018-07-31T15:20:02.197", + "startDate": "2018-07-31T15:20:02.197", + "endDate": "2018-07-31T15:20:02.22", + "level": "Info", + "message": "Test sub sequence 1.1", + "taskType": "SEQUENCE", + "taskAction": "START", + "taskHash": "12CC91C993255378B7D1BB426D2F102EC699EDC9", + "stage": "", + "source": "ETL" + }, + { + "children": [ + { + "children": [], + "logKey": 10, + "logDate": "2018-07-31T15:20:02.22", + "startDate": "2018-07-31T15:20:02.22", + "endDate": "2018-07-31T15:20:02.22", + "level": "Info", + "message": "Sql #4", + "taskType": "SQL", + "taskAction": "START", + "taskHash": "D260A325C1D89FA39A2E494945750834A74F5DD5", + "stage": "", + "source": "ETL" + } + ], + "logKey": 9, + "logDate": "2018-07-31T15:20:02.22", + "startDate": "2018-07-31T15:20:02.22", + "endDate": "2018-07-31T15:20:02.22", + "level": "Info", + "message": "Test sub sequence 1.2", + "taskType": "SEQUENCE", + "taskAction": "START", + "taskHash": "CC779938B79DAD437B829F97DBC5D33C5ECB9CE6", + "stage": "", + "source": "ETL" + }, + { + "children": [ + { + "children": [ + { + "children": [ + { + "children": [], + "logKey": 16, + "logDate": "2018-07-31T15:20:02.22", + "startDate": "2018-07-31T15:20:02.22", + "endDate": "2018-07-31T15:20:02.237", + "level": "Info", + "message": "Sql #5", + "taskType": "SQL", + "taskAction": "START", + "taskHash": "CBD263E7460E94037ACF930BCCE6A7D4AD3C60B8", + "stage": "", + "source": "ETL" + }, + { + "children": [], + "logKey": 18, + "logDate": "2018-07-31T15:20:02.237", + "startDate": 
"2018-07-31T15:20:02.237", + "endDate": "2018-07-31T15:20:02.237", + "level": "Info", + "message": "Sql #6", + "taskType": "SQL", + "taskAction": "START", + "taskHash": "5DACA85B78B7BBBCB2AA250281502AB08A9975FA", + "stage": "", + "source": "ETL" + }, + { + "children": [], + "logKey": 20, + "logDate": "2018-07-31T15:20:02.237", + "startDate": "2018-07-31T15:20:02.237", + "level": "Warn", + "message": "Warn message #2", + "taskType": "LOG", + "taskAction": "LOG", + "taskHash": "20E96D78D12F73ADA72200DEA541497DCFD18085", + "stage": "", + "source": "ETL" + } + ], + "logKey": 15, + "logDate": "2018-07-31T15:20:02.22", + "startDate": "2018-07-31T15:20:02.22", + "endDate": "2018-07-31T15:20:02.237", + "level": "Info", + "message": "Test sub sequence 3.1", + "taskType": "SEQUENCE", + "taskAction": "START", + "taskHash": "46A96FA65EA26EC5523B65B4E4829151346FB996", + "stage": "", + "source": "ETL" + }, + { + "children": [], + "logKey": 22, + "logDate": "2018-07-31T15:20:02.237", + "startDate": "2018-07-31T15:20:02.237", + "endDate": "2018-07-31T15:20:02.237", + "level": "Info", + "message": "Custom #1", + "taskType": "CUSTOM", + "taskAction": "START", + "taskHash": "58293C8CEC842C107DA1A0AA56A2AE882F29E9E8", + "stage": "", + "source": "ETL" + }, + { + "children": [], + "logKey": 24, + "logDate": "2018-07-31T15:20:02.237", + "startDate": "2018-07-31T15:20:02.237", + "endDate": "2018-07-31T15:20:02.25", + "level": "Info", + "message": "Sql #7", + "taskType": "SQL", + "taskAction": "START", + "taskHash": "D532EC7240C9BAEC5CC950EA5F06053A20498982", + "stage": "", + "source": "ETL" + } + ], + "logKey": 14, + "logDate": "2018-07-31T15:20:02.22", + "startDate": "2018-07-31T15:20:02.22", + "endDate": "2018-07-31T15:20:02.25", + "level": "Info", + "message": "Test sub sequence 2.1", + "taskType": "SEQUENCE", + "taskAction": "START", + "taskHash": "5E14E83DB6D0D32FFDE8AA46CFE8B6AA3EB181CC", + "stage": "", + "source": "ETL" + }, + { + "children": [ + { + "children": [], + "logKey": 28, + "logDate": "2018-07-31T15:20:02.25", + "startDate": "2018-07-31T15:20:02.25", + "endDate": "2018-07-31T15:20:02.25", + "level": "Info", + "message": "Custom #2", + "taskType": "CUSTOM", + "taskAction": "START", + "taskHash": "AD2813146204446FF7B200698BC9CF9F6835EC5F", + "stage": "", + "source": "ETL" + }, + { + "children": [], + "logKey": 30, + "logDate": "2018-07-31T15:20:02.25", + "startDate": "2018-07-31T15:20:02.25", + "endDate": "2018-07-31T15:20:02.267", + "level": "Info", + "message": "Sql #7", + "taskType": "SQL", + "taskAction": "START", + "taskHash": "403D057C79E38F89E5D1ACBF780A3F8634D3C415", + "stage": "", + "source": "ETL" + } + ], + "logKey": 27, + "logDate": "2018-07-31T15:20:02.25", + "startDate": "2018-07-31T15:20:02.25", + "endDate": "2018-07-31T15:20:02.267", + "level": "Info", + "message": "Test sub sequence 2.2", + "taskType": "SEQUENCE", + "taskAction": "START", + "taskHash": "2A6F4B61E31537F1315EC21B8BFF5DAD8E950986", + "stage": "", + "source": "ETL" + }, + { + "children": [ + { + "children": [], + "logKey": 34, + "logDate": "2018-07-31T15:20:02.267", + "startDate": "2018-07-31T15:20:02.267", + "endDate": "2018-07-31T15:20:02.287", + "level": "Info", + "message": "Sql #8", + "taskType": "SQL", + "taskAction": "START", + "taskHash": "A7292E8E7E496344CAC846F1A31D5CA51786C462", + "stage": "", + "source": "ETL" + }, + { + "children": [], + "logKey": 36, + "logDate": "2018-07-31T15:20:02.287", + "startDate": "2018-07-31T15:20:02.287", + "endDate": "2018-07-31T15:20:02.287", + "level": "Info", + "message": 
"Custom #2", + "taskType": "CUSTOM", + "taskAction": "START", + "taskHash": "5E4AA699E4140DE530F984278E425C0333835B80", + "stage": "", + "source": "ETL" + }, + { + "children": [ + { + "children": [], + "logKey": 39, + "logDate": "2018-07-31T15:20:02.297", + "startDate": "2018-07-31T15:20:02.297", + "endDate": "2018-07-31T15:20:02.297", + "level": "Info", + "message": "Sql #9", + "taskType": "SQL", + "taskAction": "START", + "taskHash": "652420FD2C5FF12E9A2C07DBBD708081AA1CA273", + "stage": "", + "source": "ETL" + }, + { + "children": [], + "logKey": 41, + "logDate": "2018-07-31T15:20:02.297", + "startDate": "2018-07-31T15:20:02.297", + "endDate": "2018-07-31T15:20:02.297", + "level": "Info", + "message": "Sql #10", + "taskType": "SQL", + "taskAction": "START", + "taskHash": "21DA6D55FC3ED2346109073FC4C6BEBC4FF3FD1F", + "stage": "", + "source": "ETL" + }, + { + "children": [], + "logKey": 43, + "logDate": "2018-07-31T15:20:02.297", + "startDate": "2018-07-31T15:20:02.297", + "level": "Error", + "message": "Error message", + "taskType": "LOG", + "taskAction": "LOG", + "taskHash": "834388851BADD352B3C7B03875048D2EE29853DE", + "stage": "", + "source": "ETL" + } + ], + "logKey": 38, + "logDate": "2018-07-31T15:20:02.287", + "startDate": "2018-07-31T15:20:02.287", + "endDate": "2018-07-31T15:20:02.297", + "level": "Info", + "message": "Test sub sequence 3.3", + "taskType": "SEQUENCE", + "taskAction": "START", + "taskHash": "0D4E74C27BA7494398E702B2843B3F62480F5412", + "stage": "", + "source": "ETL" + } + ], + "logKey": 33, + "logDate": "2018-07-31T15:20:02.267", + "startDate": "2018-07-31T15:20:02.267", + "endDate": "2018-07-31T15:20:02.297", + "level": "Info", + "message": "Test sub sequence 2.3", + "taskType": "SEQUENCE", + "taskAction": "START", + "taskHash": "E2F8E4174B33CFA5BCFC7CBC6A539658883C5EA6", + "stage": "", + "source": "ETL" + } + ], + "logKey": 13, + "logDate": "2018-07-31T15:20:02.22", + "startDate": "2018-07-31T15:20:02.22", + "endDate": "2018-07-31T15:20:02.297", + "level": "Info", + "message": "Test sub sequence 1.3", + "taskType": "SEQUENCE", + "taskAction": "START", + "taskHash": "C042E3A1F003A12DA2A9530A9730F71974010441", + "stage": "", + "source": "ETL" + }, + { + "children": [], + "logKey": 47, + "logDate": "2018-07-31T15:20:02.297", + "startDate": "2018-07-31T15:20:02.297", + "endDate": "2018-07-31T15:20:02.297", + "level": "Info", + "message": "Custom #3", + "taskType": "CUSTOM", + "taskAction": "START", + "taskHash": "C586DC44ED9BBE9844C49BB083FBBEC4F3051A0B", + "stage": "", + "source": "ETL" + } + ], + "logKey": 1, + "logDate": "2018-07-31T15:20:02.117", + "startDate": "2018-07-31T15:20:02.117", + "endDate": "2018-07-31T15:20:02.297", + "level": "Info", + "message": "Test sequence 1", + "taskType": "SUBPACKAGE", + "taskAction": "START", + "taskHash": "65E85CF4DA4D6C46E11833CA46A356B452CCF0C2", + "stage": "", + "source": "ETL" + }, + { + "children": [], + "logKey": 50, + "logDate": "2018-07-31T15:20:02.297", + "startDate": "2018-07-31T15:20:02.297", + "endDate": "2018-07-31T15:20:02.313", + "level": "Info", + "message": "Sql #1", + "taskType": "SQL", + "taskAction": "START", + "taskHash": "03F002179C229EC17A8A8651DAE24FBFEB90DC50", + "stage": "", + "source": "ETL" + }, + { + "children": [], + "logKey": 52, + "logDate": "2018-07-31T15:20:02.313", + "startDate": "2018-07-31T15:20:02.313", + "level": "Info", + "message": "Info message", + "taskType": "LOG", + "taskAction": "LOG", + "taskHash": "C6B1D03DA05D518095EEBBC80747825C8AEF8322", + "stage": "", + "source": "ETL" + } 
+ ], + "logKey": 0, + "logDate": "0001-01-01T00:00:00", + "startDate": "0001-01-01T00:00:00", + "taskType": "ROOT" +} \ No newline at end of file diff --git a/ETLBoxTest/Properties/AssemblyInfo.cs b/ETLBoxTest/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..05a6e8be --- /dev/null +++ b/ETLBoxTest/Properties/AssemblyInfo.cs @@ -0,0 +1,35 @@ +using System.Reflection; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("ETLBox")] +[assembly: AssemblyDescription("It's all in the box! Run all your ETL jobs with this awesome C# class library.")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("ALE")] +[assembly: AssemblyProduct("ETLBox")] +[assembly: AssemblyCopyright("Copyright © Andreas Lennartz 2018")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("e18c5029-9fa4-4846-828c-1b42d5d18c3c")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("0.2.0.0")] +[assembly: AssemblyFileVersion("0.2.0.0")] diff --git a/ETLBoxTest/Tests.runsettings b/ETLBoxTest/Tests.runsettings new file mode 100644 index 00000000..962d5b1f --- /dev/null +++ b/ETLBoxTest/Tests.runsettings @@ -0,0 +1,13 @@ + + + + + 1 + x64 + + + + + + + diff --git a/ETLBoxTest/packages.config b/ETLBoxTest/packages.config new file mode 100644 index 00000000..cf9a6945 --- /dev/null +++ b/ETLBoxTest/packages.config @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/Libs/Microsoft.AnalysisServices.AdomdClient.dll b/Libs/Microsoft.AnalysisServices.AdomdClient.dll new file mode 100644 index 00000000..685801e0 Binary files /dev/null and b/Libs/Microsoft.AnalysisServices.AdomdClient.dll differ diff --git a/Libs/Microsoft.AnalysisServices.Core.dll b/Libs/Microsoft.AnalysisServices.Core.dll new file mode 100644 index 00000000..ca387f7c Binary files /dev/null and b/Libs/Microsoft.AnalysisServices.Core.dll differ diff --git a/Libs/Microsoft.AnalysisServices.Tabular.Json.dll b/Libs/Microsoft.AnalysisServices.Tabular.Json.dll new file mode 100644 index 00000000..89ca9d0f Binary files /dev/null and b/Libs/Microsoft.AnalysisServices.Tabular.Json.dll differ diff --git a/Libs/Microsoft.AnalysisServices.Tabular.dll b/Libs/Microsoft.AnalysisServices.Tabular.dll new file mode 100644 index 00000000..c41d69ab Binary files /dev/null and b/Libs/Microsoft.AnalysisServices.Tabular.dll differ diff --git a/Libs/Microsoft.AnalysisServices.Xmla.dll b/Libs/Microsoft.AnalysisServices.Xmla.dll new file mode 100644 index 00000000..90a0452b Binary files /dev/null and b/Libs/Microsoft.AnalysisServices.Xmla.dll differ diff --git a/Libs/Microsoft.AnalysisServices.dll b/Libs/Microsoft.AnalysisServices.dll new file mode 100644 index 00000000..62eb1e7a Binary files /dev/null and 
b/Libs/Microsoft.AnalysisServices.dll differ diff --git a/Libs/Microsoft.SqlServer.BatchParser.dll b/Libs/Microsoft.SqlServer.BatchParser.dll new file mode 100644 index 00000000..579e70e9 Binary files /dev/null and b/Libs/Microsoft.SqlServer.BatchParser.dll differ diff --git a/Libs/Microsoft.SqlServer.ConnectionInfo.dll b/Libs/Microsoft.SqlServer.ConnectionInfo.dll new file mode 100644 index 00000000..9ceef994 Binary files /dev/null and b/Libs/Microsoft.SqlServer.ConnectionInfo.dll differ diff --git a/Libs/Microsoft.SqlServer.Dmf.dll b/Libs/Microsoft.SqlServer.Dmf.dll new file mode 100644 index 00000000..be587614 Binary files /dev/null and b/Libs/Microsoft.SqlServer.Dmf.dll differ diff --git a/Libs/Microsoft.SqlServer.Management.Sdk.Sfc.dll b/Libs/Microsoft.SqlServer.Management.Sdk.Sfc.dll new file mode 100644 index 00000000..a59befb8 Binary files /dev/null and b/Libs/Microsoft.SqlServer.Management.Sdk.Sfc.dll differ diff --git a/Libs/Microsoft.SqlServer.ServiceBrokerEnum.dll b/Libs/Microsoft.SqlServer.ServiceBrokerEnum.dll new file mode 100644 index 00000000..495c1a33 Binary files /dev/null and b/Libs/Microsoft.SqlServer.ServiceBrokerEnum.dll differ diff --git a/Libs/Microsoft.SqlServer.Smo.dll b/Libs/Microsoft.SqlServer.Smo.dll new file mode 100644 index 00000000..19cf7b29 Binary files /dev/null and b/Libs/Microsoft.SqlServer.Smo.dll differ diff --git a/Libs/Microsoft.SqlServer.SqlEnum.dll b/Libs/Microsoft.SqlServer.SqlEnum.dll new file mode 100644 index 00000000..54b9c1f2 Binary files /dev/null and b/Libs/Microsoft.SqlServer.SqlEnum.dll differ