Skip to content

Commit

Permalink
refactor: code cleaning
Browse files Browse the repository at this point in the history
feat: add nuget info
  • Loading branch information
SarcasticMoose committed Jun 5, 2024
1 parent 5b133ff commit 962a157
Show file tree
Hide file tree
Showing 11 changed files with 78 additions and 39 deletions.
6 changes: 6 additions & 0 deletions McdaMethods.sln
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "McdaToolkit", "McdaToolkit\
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "McdaToolkit.UnitTests", "McdaToolkit.UnitTests\McdaToolkit.UnitTests.csproj", "{675DE565-59A0-4865-A306-B12FCC64EF1F}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "TestUddsadsada", "TestUddsadsada\TestUddsadsada.csproj", "{C7D4020C-DC44-47E3-BB9C-2D4CCFA88A82}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Expand All @@ -21,6 +23,10 @@ Global
{675DE565-59A0-4865-A306-B12FCC64EF1F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{675DE565-59A0-4865-A306-B12FCC64EF1F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{675DE565-59A0-4865-A306-B12FCC64EF1F}.Release|Any CPU.Build.0 = Release|Any CPU
{C7D4020C-DC44-47E3-BB9C-2D4CCFA88A82}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C7D4020C-DC44-47E3-BB9C-2D4CCFA88A82}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C7D4020C-DC44-47E3-BB9C-2D4CCFA88A82}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C7D4020C-DC44-47E3-BB9C-2D4CCFA88A82}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
Expand Down
2 changes: 1 addition & 1 deletion McdaToolkit.UnitTests/McdaMethodsTests.cs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
using FluentAssertions;
using MathNet.Numerics;
using McdaToolkit.Mcda;
using McdaToolkit.McdaMethods;
using McdaToolkit.UnitTests.Helpers;
using Xunit.Abstractions;

Expand Down
6 changes: 5 additions & 1 deletion McdaToolkit/Extensions/EnumerableExtentions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,11 @@ internal static class EnumerableExtentions
{
public static IEnumerable<(T item, int index)> Indexed<T>(this IEnumerable<T> source)
{
ArgumentNullException.ThrowIfNull(source);
if (source is null)
{
throw new ArgumentNullException();
}

var i = 0;
foreach (var item in source)
{
Expand Down
6 changes: 0 additions & 6 deletions McdaToolkit/Mcda/Interfaces/IMethod.cs

This file was deleted.

9 changes: 9 additions & 0 deletions McdaToolkit/McdaMethods/Interfaces/IMethod.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
using MathNet.Numerics.LinearAlgebra;

namespace McdaToolkit.McdaMethods.Interfaces;

/// <summary>
/// Contract for a multi-criteria decision analysis (MCDA) method that scores
/// the alternatives of a decision matrix.
/// </summary>
public interface IMethod
{
    /// <summary>
    /// Calculates a score vector from a decision matrix given as a 2D array.
    /// </summary>
    /// <param name="matrix">Decision matrix; presumably rows are alternatives and columns are criteria — TODO confirm against callers.</param>
    /// <param name="weights">One weight per matrix column (criterion).</param>
    /// <param name="criteriaDirections">Per-criterion direction flags (e.g. benefit vs. cost) consumed by normalization.</param>
    /// <returns>One score per matrix row.</returns>
    Vector<double> Calculate(double[,] matrix, double[] weights, int[] criteriaDirections);
    /// <summary>
    /// Calculates a score vector from a decision matrix given as a sequence of rows.
    /// </summary>
    /// <param name="matrix">Decision matrix rows; each inner sequence is one row.</param>
    /// <param name="weights">One weight per matrix column (criterion).</param>
    /// <param name="criteriaDirections">Per-criterion direction flags consumed by normalization.</param>
    /// <returns>One score per matrix row.</returns>
    Vector<double> Calculate(IEnumerable<IEnumerable<double>> matrix, double[] weights, int[] criteriaDirections);
}
21 changes: 21 additions & 0 deletions McdaToolkit/McdaMethods/McdaMethod.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
using MathNet.Numerics.LinearAlgebra;
using McdaToolkit.McdaMethods.Interfaces;

namespace McdaToolkit.McdaMethods;

/// <summary>
/// Base class for MCDA methods. Converts the supported public input shapes
/// (2D array, sequence of rows) into a MathNet <see cref="Matrix{T}"/> and
/// delegates the actual scoring to the method-specific implementation.
/// </summary>
public abstract class McdaMethod : IMethod
{
    /// <summary>
    /// Calculates scores for a decision matrix given as a 2D array.
    /// </summary>
    /// <param name="matrix">Decision matrix as a 2D array.</param>
    /// <param name="weights">One weight per matrix column.</param>
    /// <param name="criteriaDirections">Per-criterion direction flags.</param>
    /// <returns>One score per matrix row.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="matrix"/> is null.</exception>
    public Vector<double> Calculate(double[,] matrix, double[] weights, int[] criteriaDirections)
    {
        // Manual guard (not ArgumentNullException.ThrowIfNull) because the package
        // multi-targets netstandard2.1, which lacks the throw helper; this also
        // matches the guard style used in EnumerableExtentions.Indexed.
        if (matrix is null)
        {
            throw new ArgumentNullException(nameof(matrix));
        }

        var denseMatrix = Matrix<double>.Build.DenseOfArray(matrix);
        return Calculate(denseMatrix, weights, criteriaDirections);
    }

    /// <summary>
    /// Calculates scores for a decision matrix given as a sequence of rows.
    /// </summary>
    /// <param name="matrix">Decision matrix rows; each inner sequence is one row.</param>
    /// <param name="weights">One weight per matrix column.</param>
    /// <param name="criteriaDirections">Per-criterion direction flags.</param>
    /// <returns>One score per matrix row.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="matrix"/> is null.</exception>
    public Vector<double> Calculate(IEnumerable<IEnumerable<double>> matrix, double[] weights, int[] criteriaDirections)
    {
        if (matrix is null)
        {
            throw new ArgumentNullException(nameof(matrix));
        }

        var denseMatrix = Matrix<double>.Build.DenseOfRows(matrix);
        return Calculate(denseMatrix, weights, criteriaDirections);
    }

    /// <summary>
    /// Method-specific scoring over the converted matrix; implemented by each
    /// concrete MCDA method (e.g. TOPSIS).
    /// </summary>
    protected abstract Vector<double> Calculate(Matrix<double> matrix, double[] weights, int[] criteriaDirections);
}
Original file line number Diff line number Diff line change
@@ -1,33 +1,27 @@
using System.Numerics;
using MathNet.Numerics.LinearAlgebra;
using McdaToolkit.Enums;
using McdaToolkit.Mcda.Interfaces;
using McdaToolkit.McdaMethods.Interfaces;
using McdaToolkit.Normalization;
using McdaToolkit.Options;

namespace McdaToolkit.Mcda;
namespace McdaToolkit.McdaMethods;

public class TopsisMethod : IMethod
public class TopsisMethod : McdaMethod
{
private DataNormalizationService _normalizationServiceService;

public TopsisMethod()
{
{
_normalizationServiceService = new DataNormalizationService(NormalizationMethodEnum.MinMax);
}

public TopsisMethod(McdaMethodOptions options)
{
_normalizationServiceService = new DataNormalizationService(options.NormalizationMethodEnum);
}

public MathNet.Numerics.LinearAlgebra.Vector<double> Calculate(double[,] matrix, double[] weights,
int[] criteriaDirections)
public TopsisMethod(McdaMethodOptions options)
{
var convertedMatrix = Matrix<double>.Build.DenseOfArray(matrix);
return Calculate(convertedMatrix,weights, criteriaDirections);
_normalizationServiceService = new DataNormalizationService(options.NormalizationMethodEnum);
}

private MathNet.Numerics.LinearAlgebra.Vector<double> Calculate(Matrix<double>? matrix, double[] weights, int[] criteriaDirections)
protected override Vector<double> Calculate(Matrix<double> matrix, double[] weights,
int[] criteriaDirections)
{
var normalizedMatrix = _normalizationServiceService.NormalizeMatrix(matrix, criteriaDirections);
var weightedMatrix = WeightedMatrix(normalizedMatrix, weights);
Expand All @@ -41,6 +35,7 @@ private MathNet.Numerics.LinearAlgebra.Vector<double> Calculate(Matrix<double>?

return topsisScores;
}

private Matrix<double> WeightedMatrix(Matrix<double> matrix, double[] weights)
{
for (int i = 0; i < matrix.RowCount; i++)
Expand All @@ -50,20 +45,24 @@ private Matrix<double> WeightedMatrix(Matrix<double> matrix, double[] weights)
matrix[i, j] *= weights[j];
}
}

return matrix;
}
private MathNet.Numerics.LinearAlgebra.Vector<double> IdealValues(Matrix<double> matrix, bool pis)

private Vector<double> IdealValues(Matrix<double> matrix, bool pis)
{
return MathNet.Numerics.LinearAlgebra.Vector<double>.Build
return Vector<double>.Build
.Dense(matrix.ColumnCount, i =>
{
var columnValues = matrix.Column(i).ToArray();
return pis ? columnValues.Max() : columnValues.Min();
});
}
private MathNet.Numerics.LinearAlgebra.Vector<double> CalculateEuclideanDistance(Matrix<double> matrix, MathNet.Numerics.LinearAlgebra.Vector<double> ideal)

private Vector<double> CalculateEuclideanDistance(Matrix<double> matrix,
Vector<double> ideal)
{
return MathNet.Numerics.LinearAlgebra.Vector<double>.Build
return Vector<double>.Build
.DenseOfArray(matrix
.EnumerateRows()
.Select(row => row
Expand All @@ -73,9 +72,9 @@ private MathNet.Numerics.LinearAlgebra.Vector<double> CalculateEuclideanDistance
.Sum())
.ToArray());
}
private MathNet.Numerics.LinearAlgebra.Vector<double> CalculateTopsisScores(MathNet.Numerics.LinearAlgebra.Vector<double> distanceToBest, MathNet.Numerics.LinearAlgebra.Vector<double> distanceToWorst)

private Vector<double> CalculateTopsisScores(Vector<double> distanceToBest, Vector<double> distanceToWorst)
{
return distanceToWorst
.PointwiseDivide(distanceToBest.Add(distanceToWorst));
return distanceToWorst.PointwiseDivide(distanceToBest.Add(distanceToWorst));
}
}
}
11 changes: 9 additions & 2 deletions McdaToolkit/McdaToolkit.csproj
Original file line number Diff line number Diff line change
@@ -1,12 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
  <!-- Multi-target frameworks. NOTE(review): the diff showed both a singular
       <TargetFramework> and the plural <TargetFrameworks>; MSBuild accepts one
       or the other, never both, so only the multi-target list is kept. -->
  <TargetFrameworks>netstandard2.1;net6.0;net7.0;net8.0</TargetFrameworks>
  <ImplicitUsings>enable</ImplicitUsings>
  <Nullable>enable</Nullable>
  <LangVersion>latest</LangVersion>
  <!--Nuget info-->
  <PackageId>McdaToolkit</PackageId>
  <Version>1.0.1</Version>
  <Authors>Jakub Tokarczyk</Authors>
  <Copyright>© 2024 Jakub Tokarczyk</Copyright>
  <PackageLicenseExpression>MIT</PackageLicenseExpression>
  <RepositoryUrl>https://github.com/SarcasticMoose/mcda-toolkit</RepositoryUrl>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="MathNet.Numerics" Version="5.0.0" />
</ItemGroup>
Expand Down
2 changes: 1 addition & 1 deletion McdaToolkit/Normalization/DataNormalizationService.cs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ public class DataNormalizationService(NormalizationMethodEnum methodEnum) : IDat
{
private readonly INormalizationMethod _method = NormalizationFactory.CreateNormalizationMethod(methodEnum);

public Matrix<double> NormalizeMatrix(Matrix<double>? matrix, int[] criteriaTypes)
public Matrix<double> NormalizeMatrix(Matrix<double> matrix, int[] criteriaTypes)
{
var normalizedMatrix = Matrix<double>.Build.Dense(matrix.RowCount, matrix.ColumnCount);

Expand Down
2 changes: 1 addition & 1 deletion McdaToolkit/Normalization/Interfaces/IDataNormalization.cs
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,5 @@ namespace McdaToolkit.Normalization.Interfaces;

public interface IDataNormalization
{
Matrix<double> NormalizeMatrix(Matrix<double>? matrix, int[] criteriaTypes);
Matrix<double> NormalizeMatrix(Matrix<double> matrix, int[] criteriaTypes);
}
7 changes: 3 additions & 4 deletions McdaToolkit/NormalizationMethods/MinMaxNormalization.cs
Original file line number Diff line number Diff line change
@@ -1,19 +1,18 @@
using System.Numerics;
using McdaToolkit.NormalizationMethods.Interfaces;
using MathNet.Numerics.LinearAlgebra;

namespace McdaToolkit.NormalizationMethods;

public class MinMaxNormalization : INormalizationMethod
{
public MathNet.Numerics.LinearAlgebra.Vector<double> Normalize(MathNet.Numerics.LinearAlgebra.Vector<double> data, bool cost = false)
public Vector<double> Normalize(MathNet.Numerics.LinearAlgebra.Vector<double> data, bool cost = false)
{
var max = data.Maximum();
var min = data.Minimum();
var difference = max - min;

if (Math.Abs(difference) < 1.11e-16)
{
return MathNet.Numerics.LinearAlgebra.Vector<double>.Build.Dense(data.Count, (i) => 1);
return Vector<double>.Build.Dense(data.Count, (i) => 1);
}

if (cost)
Expand Down

0 comments on commit 962a157

Please sign in to comment.