Merged
Changes from all commits
53 commits
4db51df
Update editor config
mdabros Feb 15, 2025
1993737
Fix naming violations
mdabros Feb 15, 2025
c38e972
Rename
mdabros Feb 15, 2025
5387e1d
Use nameof
mdabros Feb 15, 2025
e471f1a
Use collection expression
mdabros Feb 15, 2025
6a6530b
Use IsEmpty
mdabros Feb 15, 2025
9851422
collection expression
mdabros Feb 15, 2025
978e859
Fix
mdabros Feb 15, 2025
0c4d86d
Simplify
mdabros Feb 15, 2025
439ffac
Fix
mdabros Feb 15, 2025
24fe10e
Fix
mdabros Feb 15, 2025
436854c
Add Roslyn analyzers and Formatting.Analyzers
mdabros Feb 15, 2025
3106962
Add braces
mdabros Feb 15, 2025
ea7b049
Use collection expression
mdabros Feb 15, 2025
50506a0
Simplify
mdabros Feb 15, 2025
fbff8f3
Remove blank lines
mdabros Feb 15, 2025
9abbf29
Use trailing comma
mdabros Feb 15, 2025
6cb49e9
Remove empty statement
mdabros Feb 15, 2025
6423b72
Use elemental access
mdabros Feb 15, 2025
1073bc1
Use elemental access
mdabros Feb 15, 2025
36ec05e
Simplify
mdabros Feb 15, 2025
8a8579c
Conditional access
mdabros Feb 15, 2025
32a71d3
Combine
mdabros Feb 15, 2025
0662105
Use compound
mdabros Feb 15, 2025
b29db0b
Simplify
mdabros Feb 15, 2025
15e8aa7
Remove blank line
mdabros Feb 15, 2025
37a6f1a
Add
mdabros Feb 15, 2025
ba82ccb
Remove trailing white space
mdabros Feb 15, 2025
5698f2d
Remove
mdabros Feb 15, 2025
ebc418d
Properties
mdabros Feb 15, 2025
59062c6
Use operator
mdabros Feb 15, 2025
fbac096
Simplify
mdabros Feb 15, 2025
eec7d5e
Simplify
mdabros Feb 15, 2025
31f5f06
Remove empty summary
mdabros Feb 15, 2025
6265608
Remove braces
mdabros Feb 15, 2025
acc29a9
Remove unnecessary
mdabros Feb 15, 2025
3d8cc83
Use appendformat
mdabros Feb 15, 2025
cd39b2f
Simplify
mdabros Feb 15, 2025
bf4c21e
Refactor
mdabros Feb 15, 2025
d3ab4c1
Simplify
mdabros Feb 15, 2025
9261258
Summary
mdabros Feb 15, 2025
28f37fd
Trailing comma
mdabros Feb 15, 2025
a7457d6
Simplify
mdabros Feb 15, 2025
90266dc
Join string expressions
mdabros Feb 15, 2025
bba366d
Disable warning
mdabros Feb 15, 2025
b6fdbf0
Line after usings
mdabros Feb 15, 2025
f5e4b6f
Simplify
mdabros Feb 15, 2025
c3f77a8
usings
mdabros Feb 15, 2025
7b80f18
Simplify
mdabros Feb 15, 2025
201257d
Suppress
mdabros Feb 15, 2025
67f5285
Fix tests
mdabros Feb 15, 2025
8f22aac
Disable data
mdabros Feb 15, 2025
334a6ac
Try reducing requirements during optimization to get results further …
mdabros Feb 15, 2025
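Most of the commits above apply mechanical C# modernizations: collection expressions, nameof, conditional access, compound assignment, and trailing commas. The snippet below is a hypothetical before/after sketch of those patterns, not code from this PR; the class and member names are invented for illustration.

using System;
using System.Collections.Generic;

public class ModernizationExamples
{
    // Collection expression (C# 12) instead of new int[] { ... }.
    static readonly int[] OldStyle = new int[] { 1, 2, 3 };
    static readonly int[] NewStyle = [1, 2, 3];

    // Trailing comma after the last element keeps future diffs to one line.
    static readonly Dictionary<string, int> FeatureIndex = new()
    {
        { "AptitudeTestScore", 0 },
        { "PreviousExperience_month", 1 },
    };

    public static void Normalize(double[] weights, double weightSum)
    {
        // nameof instead of a hard-coded parameter name string.
        if (weights is null)
        {
            throw new ArgumentNullException(nameof(weights));
        }

        // Compound assignment: w = w / sum becomes w /= sum.
        for (var i = 0; i < weights.Length; i++)
        {
            weights[i] /= weightSum;
        }
    }

    public static int Count(List<double>? values)
    {
        // Conditional access instead of an explicit null check.
        return values?.Count ?? 0;
    }
}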
1,202 changes: 1,141 additions & 61 deletions .editorconfig

Large diffs are not rendered by default.

8 changes: 7 additions & 1 deletion src/Directory.Build.props
Original file line number Diff line number Diff line change
Expand Up @@ -23,16 +23,22 @@
<PublishRelease>true</PublishRelease>
<PackRelease>true</PackRelease>

<!-- https://github.com/dotnet/roslyn/issues/41640 🤦 -->
<GenerateDocumentationFile>true</GenerateDocumentationFile>
<NoWarn>$(NoWarn);CS1591;RCS1138;CS1668</NoWarn>

<AnalysisLevel>latest</AnalysisLevel>
<EnforceCodeStyleInBuild>true</EnforceCodeStyleInBuild>
<RunAnalyzersDuringBuild>true</RunAnalyzersDuringBuild>
<EnableNETAnalyzers>true</EnableNETAnalyzers>
<CodeAnalysisTreatWarningsAsErrors>true</CodeAnalysisTreatWarningsAsErrors>
<NoWarn>CS1591;CS1668</NoWarn>

</PropertyGroup>

<ItemGroup>
<PackageReference Include="Roslynator.Analyzers" Version="4.12.11" PrivateAssets="All"/>
<PackageReference Include="Roslynator.Formatting.Analyzers" Version="4.12.11" PrivateAssets="All"/>
</ItemGroup>

<Import Project="$(MSBuildThisFileDirectory)\OutputBuildProps.props" />

Expand Down
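For context on the NoWarn list above: CS1591 is the compiler warning for a missing XML comment on a publicly visible member, which GenerateDocumentationFile enables project-wide, and RCS1138 is a related Roslynator documentation rule. The project-level NoWarn silences these globally; a narrower, file-level alternative is a #pragma block. The sketch below is a generic illustration, not code from this PR:

// Hypothetical file: suppressing the diagnostic locally instead of via <NoWarn>.
#pragma warning disable CS1591 // Missing XML comment for publicly visible type or member
public class UndocumentedHelper
{
    public int Value { get; set; }
}
#pragma warning restore CS1591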
1 change: 0 additions & 1 deletion src/SharpLearning.AdaBoost.Test/DataSetUtilities.cs
Original file line number Diff line number Diff line change
Expand Up @@ -267,5 +267,4 @@ public static (F64Matrix observations, double[] targets) LoadGlassDataSet()
1.52065;14.36;0;2.02;73.42;0;8.44;1.64;0;7
1.51651;14.38;0;1.94;73.61;0;8.48;1.57;0;7
1.51711;14.23;0;2.08;73.36;0;8.62;1.67;0;7";

}
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ public void ClassificationAdaBoostModel_PredictProbability_Single()

Assert.AreEqual(0.038461538461538464, error, 0.0000001);

var expected = new ProbabilityPrediction[] { new(0, new Dictionary<double, double> { { 0, 0.553917222019051 }, { 1, 0.446082777980949 }, }), new(1, new Dictionary<double, double> { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new(0, new Dictionary<double, double> { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new(0, new Dictionary<double, double> { { 0, 0.564961572849738 }, { 1, 0.435038427150263 }, }), new(1, new Dictionary<double, double> { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(1, new Dictionary<double, double> { { 0, 0.417527839140627 }, { 1, 0.582472160859373 }, }), new(1, new Dictionary<double, double> { { 0, 0.409988559960094 }, { 1, 0.590011440039906 }, }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary<double, double> { { 0, 0.461264944069783 }, { 1, 0.538735055930217 }, }), new(0, new Dictionary<double, double> { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new(0, new Dictionary<double, double> { { 0, 0.549503146925505 }, { 1, 0.450496853074495 }, }), new(0, new Dictionary<double, double> { { 0, 0.537653803214063 }, { 1, 0.462346196785938 }, }), new(1, new Dictionary<double, double> { { 0, 0.37650723540928 }, { 1, 0.62349276459072 }, }), new(0, new Dictionary<double, double> { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(0, new Dictionary<double, double> { { 0, 0.524371409810479 }, { 1, 0.475628590189522 }, }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary<double, double> { { 0, 0.471117379964633 }, { 1, 0.528882620035367 }, }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary<double, double> { { 0, 0.404976804073458 }, { 1, 0.595023195926542 }, }), new(0, new Dictionary<double, double> { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), };
var expected = new ProbabilityPrediction[] { new(0, new Dictionary<double, double> { { 0, 0.553917222019051 }, { 1, 0.446082777980949 } }), new(1, new Dictionary<double, double> { { 0, 0.455270122123639 }, { 1, 0.544729877876361 } }), new(0, new Dictionary<double, double> { { 0, 0.590671208378385 }, { 1, 0.409328791621616 } }), new(0, new Dictionary<double, double> { { 0, 0.564961572849738 }, { 1, 0.435038427150263 } }), new(1, new Dictionary<double, double> { { 0, 0.455270122123639 }, { 1, 0.544729877876361 } }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(1, new Dictionary<double, double> { { 0, 0.417527839140627 }, { 1, 0.582472160859373 } }), new(1, new Dictionary<double, double> { { 0, 0.409988559960094 }, { 1, 0.590011440039906 } }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary<double, double> { { 0, 0.461264944069783 }, { 1, 0.538735055930217 } }), new(0, new Dictionary<double, double> { { 0, 0.590671208378385 }, { 1, 0.409328791621616 } }), new(0, new Dictionary<double, double> { { 0, 0.549503146925505 }, { 1, 0.450496853074495 } }), new(0, new Dictionary<double, double> { { 0, 0.537653803214063 }, { 1, 0.462346196785938 } }), new(1, new Dictionary<double, double> { { 0, 0.37650723540928 }, { 1, 0.62349276459072 } }), new(0, new Dictionary<double, double> { { 0, 0.573579890413618 }, { 1, 0.426420109586382 } }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(0, new Dictionary<double, double> { { 0, 0.524371409810479 }, { 1, 0.475628590189522 } }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary<double, double> { { 0, 0.471117379964633 }, { 1, 0.528882620035367 } }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary<double, double> { { 0, 0.404976804073458 }, { 1, 0.595023195926542 } }), new(0, new Dictionary<double, double> { { 0, 0.573579890413618 }, { 1, 0.426420109586382 } }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }) };
CollectionAssert.AreEqual(expected, actual);
}

Expand All @@ -90,7 +90,7 @@ public void ClassificationAdaBoostModel_PredictProbability_Multiple()

Assert.AreEqual(0.038461538461538464, error, 0.0000001);

var expected = new ProbabilityPrediction[] { new(0, new Dictionary<double, double> { { 0, 0.553917222019051 }, { 1, 0.446082777980949 }, }), new(1, new Dictionary<double, double> { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new(0, new Dictionary<double, double> { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new(0, new Dictionary<double, double> { { 0, 0.564961572849738 }, { 1, 0.435038427150263 }, }), new(1, new Dictionary<double, double> { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(1, new Dictionary<double, double> { { 0, 0.417527839140627 }, { 1, 0.582472160859373 }, }), new(1, new Dictionary<double, double> { { 0, 0.409988559960094 }, { 1, 0.590011440039906 }, }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary<double, double> { { 0, 0.461264944069783 }, { 1, 0.538735055930217 }, }), new(0, new Dictionary<double, double> { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new(0, new Dictionary<double, double> { { 0, 0.549503146925505 }, { 1, 0.450496853074495 }, }), new(0, new Dictionary<double, double> { { 0, 0.537653803214063 }, { 1, 0.462346196785938 }, }), new(1, new Dictionary<double, double> { { 0, 0.37650723540928 }, { 1, 0.62349276459072 }, }), new(0, new Dictionary<double, double> { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(0, new Dictionary<double, double> { { 0, 0.524371409810479 }, { 1, 0.475628590189522 }, }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary<double, double> { { 0, 0.471117379964633 }, { 1, 0.528882620035367 }, }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary<double, double> { { 0, 0.404976804073458 }, { 1, 0.595023195926542 }, }), new(0, new Dictionary<double, double> { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), };
var expected = new ProbabilityPrediction[] { new(0, new Dictionary<double, double> { { 0, 0.553917222019051 }, { 1, 0.446082777980949 } }), new(1, new Dictionary<double, double> { { 0, 0.455270122123639 }, { 1, 0.544729877876361 } }), new(0, new Dictionary<double, double> { { 0, 0.590671208378385 }, { 1, 0.409328791621616 } }), new(0, new Dictionary<double, double> { { 0, 0.564961572849738 }, { 1, 0.435038427150263 } }), new(1, new Dictionary<double, double> { { 0, 0.455270122123639 }, { 1, 0.544729877876361 } }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(1, new Dictionary<double, double> { { 0, 0.417527839140627 }, { 1, 0.582472160859373 } }), new(1, new Dictionary<double, double> { { 0, 0.409988559960094 }, { 1, 0.590011440039906 } }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary<double, double> { { 0, 0.461264944069783 }, { 1, 0.538735055930217 } }), new(0, new Dictionary<double, double> { { 0, 0.590671208378385 }, { 1, 0.409328791621616 } }), new(0, new Dictionary<double, double> { { 0, 0.549503146925505 }, { 1, 0.450496853074495 } }), new(0, new Dictionary<double, double> { { 0, 0.537653803214063 }, { 1, 0.462346196785938 } }), new(1, new Dictionary<double, double> { { 0, 0.37650723540928 }, { 1, 0.62349276459072 } }), new(0, new Dictionary<double, double> { { 0, 0.573579890413618 }, { 1, 0.426420109586382 } }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(0, new Dictionary<double, double> { { 0, 0.524371409810479 }, { 1, 0.475628590189522 } }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary<double, double> { { 0, 0.471117379964633 }, { 1, 0.528882620035367 } }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary<double, double> { { 0, 0.404976804073458 }, { 1, 0.595023195926542 } }), new(0, new Dictionary<double, double> { { 0, 0.573579890413618 }, { 1, 0.426420109586382 } }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }) };
CollectionAssert.AreEqual(expected, actual);
}

Expand All @@ -100,14 +100,14 @@ public void ClassificationAdaBoostModel_GetVariableImportance()
var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();

var featureNameToIndex = new Dictionary<string, int> { { "AptitudeTestScore", 0 },
{ "PreviousExperience_month", 1 } };
{ "PreviousExperience_month", 1 }, };

var learner = new ClassificationAdaBoostLearner(10, 1, 3);
var sut = learner.Learn(observations, targets);

var actual = sut.GetVariableImportance(featureNameToIndex);
var expected = new Dictionary<string, double> { { "PreviousExperience_month", 100.0 },
{ "AptitudeTestScore", 24.0268096428771 } };
{ "AptitudeTestScore", 24.0268096428771 }, };

Assert.AreEqual(expected.Count, actual.Count);
var zip = expected.Zip(actual, (e, a) => new { Expected = e, Actual = a });
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,14 +56,14 @@ public void RegressionAdaBoostModel_GetVariableImportance()
var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();

var featureNameToIndex = new Dictionary<string, int> { { "AptitudeTestScore", 0 },
{ "PreviousExperience_month", 1 } };
{ "PreviousExperience_month", 1 }, };

var learner = new RegressionAdaBoostLearner(10);
var sut = learner.Learn(observations, targets);

var actual = sut.GetVariableImportance(featureNameToIndex);
var expected = new Dictionary<string, double> { { "PreviousExperience_month", 100.0 },
{ "AptitudeTestScore", 33.8004886838701 } };
{ "AptitudeTestScore", 33.8004886838701 }, };

Assert.AreEqual(expected.Count, actual.Count);
var zip = expected.Zip(actual, (e, a) => new { Expected = e, Actual = a });
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,5 +18,5 @@ public enum AdaBoostRegressionLoss
/// <summary>
/// Exponential loss
/// </summary>
Exponential
Exponential,
}
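This hunk only shows the trailing comma being added to the final enum member. For orientation, the full enum presumably looks roughly like the sketch below; the Linear and Squared members are assumed from the AdaBoost.R2 loss functions and do not appear in this diff:

public enum AdaBoostRegressionLoss
{
    /// <summary>
    /// Linear loss (assumed member, not shown in this diff)
    /// </summary>
    Linear,

    /// <summary>
    /// Squared loss (assumed member, not shown in this diff)
    /// </summary>
    Squared,

    /// <summary>
    /// Exponential loss
    /// </summary>
    Exponential,
}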
Original file line number Diff line number Diff line change
Expand Up @@ -49,8 +49,8 @@ public sealed class ClassificationAdaBoostLearner
/// </summary>
/// <param name="iterations">Number of iterations (models) to boost</param>
/// <param name="learningRate">How much each boost iteration should add (between 1.0 and 0.0)</param>
/// <param name="maximumTreeDepth">The maximum depth of the tree models.
/// for 2 class problem 1 is usually enough. For more classes or larger problems between 3 to 8 is recommended.
/// <param name="maximumTreeDepth">The maximum depth of the tree models.
/// for 2 class problem 1 is usually enough. For more classes or larger problems between 3 to 8 is recommended.
/// 0 will set the depth equal to the number of classes in the problem</param>
/// <param name="minimumSplitSize">minimum node split size in the trees 1 is default</param>
/// <param name="minimumInformationGain">The minimum improvement in information gain before a split is made</param>
Expand Down Expand Up @@ -139,27 +139,35 @@ public ClassificationAdaBoostModel Learn(F64Matrix observations, double[] target
for (var i = 0; i < m_iterations; i++)
{
if (!Boost(observations, targets, indices, i))
{
break;
}

var ensembleError = ErrorEstimate(observations, indices);

if (ensembleError == 0.0)
{
break;
}

if (m_modelErrors[i] == 0.0)
{
break;
}

var weightSum = m_sampleWeights.Sum(indices);
if (weightSum <= 0.0)
{
break;
}

if (i == m_iterations - 1)
{
// Normalize weights
for (var j = 0; j < indices.Length; j++)
{
var index = indices[j];
m_sampleWeights[index] = m_sampleWeights[index] / weightSum;
m_sampleWeights[index] /= weightSum;
}
}
}
Expand Down
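For reference, the call pattern for this learner, mirroring the test code earlier in the diff (the constructor arguments are iterations, learningRate, and maximumTreeDepth):

// Mirrors the AdaBoost tests above; data loading assumed available.
var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();

var learner = new ClassificationAdaBoostLearner(10, 1, 3);
var model = learner.Learn(observations, targets);

// Class predictions; PredictProbability is also exercised by the tests above.
var predictions = model.Predict(observations);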
18 changes: 12 additions & 6 deletions src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
namespace SharpLearning.AdaBoost.Learners;

/// <summary>
/// Regression AdaBoost learner using the R2 algorithm
/// Regression AdaBoost learner using the R2 algorithm
/// using weighted sampling to target the observations with largest error and
/// weighted median to ensemble the models.
/// </summary>
Expand Down Expand Up @@ -44,13 +44,13 @@ public sealed class RegressionAdaBoostLearner : IIndexedLearner<double>, ILearne
readonly WeightedRandomSampler m_sampler;

/// <summary>
/// Regression AdaBoost learner using the R2 algorithm
/// Regression AdaBoost learner using the R2 algorithm
/// using weighted sampling to target the observations with largest error and
/// weighted median to ensemble the models.
/// </summary>
/// <param name="iterations">Number of iterations (models) to boost</param>
/// <param name="learningRate">How much each boost iteration should add (between 1.0 and 0.0)</param>
/// <param name="maximumTreeDepth">The maximum depth of the tree models.
/// <param name="maximumTreeDepth">The maximum depth of the tree models.
/// 0 will set the depth to default 3</param>
/// <param name="loss">Type of loss used when boosting weights. Linear is default</param>
/// <param name="minimumSplitSize">minimum node split size in the trees 1 is default</param>
Expand Down Expand Up @@ -137,27 +137,35 @@ public RegressionAdaBoostModel Learn(F64Matrix observations, double[] targets,
for (var i = 0; i < m_iterations; i++)
{
if (!Boost(observations, targets, indices, i))
{
break;
}

var ensembleError = ErrorEstimate(observations, indices);

if (ensembleError == 0.0)
{
break;
}

if (m_modelErrors[i] == 0.0)
{
break;
}

var weightSum = m_sampleWeights.Sum(indices);
if (weightSum <= 0.0)
{
break;
}

if (i == m_iterations - 1)
{
// Normalize weights
for (var j = 0; j < indices.Length; j++)
{
var index = indices[j];
m_sampleWeights[index] = m_sampleWeights[index] / weightSum;
m_sampleWeights[index] /= weightSum;
}
}
}
Expand Down Expand Up @@ -195,7 +203,6 @@ bool Boost(F64Matrix observations, double[] targets, int[] indices, int iteratio
var model = m_modelLearner.Learn(observations, targets,
m_sampleIndices); // weighted sampling is used instead of weights in training


var predictions = model.Predict(observations, indices);

for (var i = 0; i < predictions.Length; i++)
Expand All @@ -208,7 +215,6 @@ bool Boost(F64Matrix observations, double[] targets, int[] indices, int iteratio

for (var i = 0; i < m_workErrors.Length; i++)
{

var error = m_workErrors[i];

if (maxError != 0.0)
Expand Down
8 changes: 4 additions & 4 deletions src/SharpLearning.AdaBoost/WeightedRandomSampler.cs
Original file line number Diff line number Diff line change
Expand Up @@ -47,26 +47,26 @@ public void Sample(int[] indices, double[] weights, int[] outIndices)
var totalWeight = weights.Sum(indices);
var i = 0;

var index = indices.First();
var index = indices[0];
var weight = weights[index];

var samples = outIndices.Length;
var current = 0;

while (samples > 0)
{
var x = totalWeight * (1.0 - Math.Pow(m_random.NextDouble(), (1.0 / samples)));
var x = totalWeight * (1.0 - Math.Pow(m_random.NextDouble(), 1.0 / samples));
totalWeight -= x;
while (x > weight)
{
x -= weight;
i += 1;
i++;
index = indices[i];
weight = weights[index];
}
weight -= x;
outIndices[current++] = index;
samples -= 1;
samples--;
}
}
}
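The Sample method above draws outIndices.Length indices with probability proportional to their weights in a single pass: x = totalWeight * (1 - u^(1/samples)) has the distribution of the minimum of `samples` uniform draws over the remaining weight, so each iteration produces the next sorted sample point on the cumulative-weight axis and maps it to the index whose weight interval contains it. The result is weighted sampling with replacement, with output sorted by index. Below is a self-contained sketch of the same technique, independent of SharpLearning's types:

using System;

static class WeightedSampling
{
    // One-pass weighted sampling with replacement; output is sorted by index.
    // Same order-statistics trick as WeightedRandomSampler.Sample above.
    public static int[] Sample(double[] weights, int sampleCount, Random random)
    {
        var outIndices = new int[sampleCount];

        var totalWeight = 0.0;
        foreach (var w in weights) { totalWeight += w; }

        var i = 0;
        var weight = weights[0];
        var samples = sampleCount;
        var current = 0;

        while (samples > 0)
        {
            // Distributed as the smallest of `samples` uniforms on [0, totalWeight]:
            // yields the sorted sample points one at a time.
            var x = totalWeight * (1.0 - Math.Pow(random.NextDouble(), 1.0 / samples));
            totalWeight -= x;

            // Walk forward to the index whose cumulative-weight interval holds x.
            while (x > weight)
            {
                x -= weight;
                i++;
                weight = weights[i];
            }

            weight -= x;
            outIndices[current++] = i;
            samples--;
        }

        return outIndices;
    }
}

// Example: WeightedSampling.Sample(new[] { 0.1, 0.7, 0.2 }, 5, new Random(42))
// returns five indices, with index 1 drawn most often on average.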
2 changes: 1 addition & 1 deletion src/SharpLearning.Common.Interfaces/IIndexedLearner.cs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
namespace SharpLearning.Common.Interfaces;

/// <summary>
/// Interface for indexed learner.
/// Interface for indexed learner.
/// Only the observations from the provided indices in the index array will be used for training
/// </summary>
/// <typeparam name="TPrediction">The prediction type of the resulting model.</typeparam>
Expand Down