Skip to content

Commit

Permalink
Added weighted graphs and an HNSW implementation
Browse files Browse the repository at this point in the history
  • Loading branch information
Jack Dermody committed Aug 1, 2024
1 parent 097022d commit 01c9957
Show file tree
Hide file tree
Showing 22 changed files with 1,936 additions and 544 deletions.
13 changes: 13 additions & 0 deletions BrightData.UnitTests/FixedSizeArrayTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -40,5 +40,18 @@ public void TestDescending()
array.Size.Should().Be(1);
array.MaxValue.Should().Be(1);
}

[Fact]
public void TestSingle()
{
    // Exercise a capacity-one ascending sorted array end to end:
    // insert, rejected insert, replacement, min/max agreement, removal.
    var sut = new FixedSizeSortedAscending1Array<uint, float>();

    // First insertion into the empty array is accepted.
    sut.TryAdd(1, 0.2f).Should().BeTrue();

    // A heavier weight (0.3 > 0.2) is rejected once the single slot is taken.
    sut.TryAdd(2, 0.3f).Should().BeFalse();

    // A lighter weight (0.1 < 0.2) is accepted, displacing the occupant.
    sut.TryAdd(3, 0.1f).Should().BeTrue();

    // With exactly one element, the min and max views must coincide.
    sut.MaxValue.Should().Be(sut.MinValue);
    sut.MaxWeight.Should().Be(sut.MinWeight);

    // Removing the sole entry leaves the array empty.
    sut.RemoveAt(0);
    sut.Size.Should().Be(0);
}
}
}
30 changes: 15 additions & 15 deletions BrightData.UnitTests/MatrixTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -539,7 +539,7 @@ public void MatrixMultiplyEachColumnWith()
[Fact]
public void MatrixSigmoidActivation()
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var a = _cpu.CreateMatrix(3, 7, (j, k) => Convert.ToSingle(normalDistribution.Sample()));
using var cpu = a.Sigmoid();
using var gpu = Apply(_cuda, a, (a) => a.Sigmoid());
Expand All @@ -550,7 +550,7 @@ public void MatrixSigmoidActivation()
[Fact]
public void MatrixSigmoidDerivative()
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var a = _cpu.CreateMatrix(3, 7, (j, k) => Convert.ToSingle(normalDistribution.Sample()));
using var cpu = a.SigmoidDerivative();
using var gpu = Apply(_cuda, a, (a) => a.SigmoidDerivative());
Expand All @@ -561,7 +561,7 @@ public void MatrixSigmoidDerivative()
[Fact]
public void MatrixTanhActivation()
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var a = _cpu.CreateMatrix(3, 7, (j, k) => Convert.ToSingle(normalDistribution.Sample()));
using var cpu = a.Tanh();
using var gpu = Apply(_cuda, a, a => a.Tanh());
Expand All @@ -572,7 +572,7 @@ public void MatrixTanhActivation()
[Fact]
public void MatrixTanhDerivative()
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var a = _cpu.CreateMatrix(3, 7, (j, k) => Convert.ToSingle(normalDistribution.Sample()));
using var cpu = a.TanhDerivative();
using var gpu = Apply(_cuda, a, a => a.TanhDerivative());
Expand All @@ -583,7 +583,7 @@ public void MatrixTanhDerivative()
[Fact]
public void MatrixReluActivation()
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var a = _cpu.CreateMatrix(3, 7, (j, k) => Convert.ToSingle(normalDistribution.Sample()));
using var cpu = a.Relu();
using var gpu = Apply(_cuda, a, a => a.Relu());
Expand All @@ -594,7 +594,7 @@ public void MatrixReluActivation()
[Fact]
public void MatrixReluDerivative()
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var a = _cpu.CreateMatrix(3, 7, (j, k) => Convert.ToSingle(normalDistribution.Sample()));
using var cpu = a.ReluDerivative();
using var gpu = Apply(_cuda, a, a => a.ReluDerivative());
Expand All @@ -605,7 +605,7 @@ public void MatrixReluDerivative()
[Fact]
public void MatrixLeakyReluActivation()
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var a = _cpu.CreateMatrix(3, 7, (j, k) => Convert.ToSingle(normalDistribution.Sample()));
using var cpu = a.LeakyRelu();
using var gpu = Apply(_cuda, a, a => a.LeakyRelu());
Expand All @@ -616,7 +616,7 @@ public void MatrixLeakyReluActivation()
[Fact]
public void MatrixLeakyReluDerivative()
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var a = _cpu.CreateMatrix(3, 7, (j, k) => Convert.ToSingle(normalDistribution.Sample()));
using var cpu = a.LeakyReluDerivative();
using var gpu = Apply(_cuda, a, a => a.LeakyReluDerivative());
Expand All @@ -627,7 +627,7 @@ public void MatrixLeakyReluDerivative()
[Fact]
public void MatrixSoftmaxActivation()
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var a = _cpu.CreateMatrix(3, 7, (j, k) => Convert.ToSingle(normalDistribution.Sample()));
using var cpu = a.Softmax();
using var gpu = Apply(_cuda, a, a => a.Softmax());
Expand All @@ -638,7 +638,7 @@ public void MatrixSoftmaxActivation()
[Fact]
public void MatrixSoftmaxPerRow()
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var a = _cpu.CreateMatrix(3, 7, (j, k) => Convert.ToSingle(normalDistribution.Sample()));
var cpu = a.SoftmaxPerRow().Select(x => _cpu.CreateVector(x)).ToArray();
var gpu = Apply(_cuda, a, a => a.SoftmaxPerRow().Select(x => _cuda.CreateVector(x)).ToArray());
Expand All @@ -649,7 +649,7 @@ public void MatrixSoftmaxPerRow()
[Fact]
public void MatrixRowVector2()
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var a = _cpu.CreateMatrix(3, 7, (j, k) => Convert.ToSingle(normalDistribution.Sample()));
var cpu = 3.AsRange().Select(a.GetRowVector).ToArray();
var gpu = Apply(_cuda, a, a => 3.AsRange().Select(a.GetRowVector).ToArray());
Expand All @@ -660,7 +660,7 @@ public void MatrixRowVector2()
[Fact]
public void MatrixRowVectorsTransposed()
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var a = _cpu.CreateMatrix(3, 7, (j, k) => Convert.ToSingle(normalDistribution.Sample()));
using var aT = a.Transpose();
var cpu = 3.AsRange().Select(aT.GetColumnVector).ToArray();
Expand All @@ -672,7 +672,7 @@ public void MatrixRowVectorsTransposed()
[Fact]
public void MatrixSoftmaxDerivative()
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var a = _cpu.CreateMatrix(3, 7, (j, k) => Convert.ToSingle(normalDistribution.Sample()));
var cpu = _cpu.SoftmaxDerivative(a.Segment);
var gpu = Apply(_cuda, a, a => _cuda.SoftmaxDerivative(a.Segment));
Expand All @@ -683,7 +683,7 @@ public void MatrixSoftmaxDerivative()
[Fact]
public void MatrixSoftmaxPerRowDerivative()
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var a = _cpu.CreateMatrix(3, 7, (j, k) => Convert.ToSingle(normalDistribution.Sample()));
var cpu = a.SoftmaxPerRow();
var gpu = Apply(_cuda, a, a => a.SoftmaxPerRow());
Expand Down Expand Up @@ -961,7 +961,7 @@ public void MatrixPow()
[Fact]
public void MatrixConstrain()
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);
using var cpu = _cpu.CreateMatrix(100, 100, (x, y) => Convert.ToSingle(distribution.Sample()));
using var gpu = Apply(_cuda, cpu, a => a.ConstrainInPlace(-2f, 2f));
using var mkl = Apply(_mkl, cpu, a => a.ConstrainInPlace(-2f, 2f));
Expand Down
12 changes: 6 additions & 6 deletions BrightData.UnitTests/TensorTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ public void TensorAddPadding2()

void CheckTensorIm2Col(uint rows, uint columns, uint depth, uint filterWidth, uint filterHeight, uint xStride, uint yStride, bool randomData)
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var cpuTensor = _cpu.CreateTensor3D(depth.AsRange().Select(i => _cpu.CreateMatrix(rows, columns, (j, k) => randomData
? Convert.ToSingle(normalDistribution.Sample())
: Convert.ToSingle((i + 1) * (j + 1) * (k + 1))
Expand Down Expand Up @@ -318,7 +318,7 @@ public void TensorMaxPoolBlankIrregular()

void CheckTensorMaxPool(uint rows, uint columns, uint depth, uint filterWidth, uint filterHeight, uint xStride, uint yStride, bool randomInit, bool calculateIndices)
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var cpuTensor = _cpu.CreateTensor3D(depth.AsRange().Select(i => _cpu.CreateMatrix(rows, columns, (j, k) => randomInit
? Convert.ToSingle(normalDistribution.Sample())
: Convert.ToSingle((i + 1) * (j + 1) * (k + 1))
Expand Down Expand Up @@ -359,7 +359,7 @@ void CheckTensorMaxPool(uint rows, uint columns, uint depth, uint filterWidth, u

void CheckTensorReverseIm2Col(uint filterWidth, uint filterHeight, uint xStride, uint yStride, uint depth, uint filterCount, uint inputWidth, uint inputHeight)
{
var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
using var cpuTensor = _cpu.CreateTensor3D(depth.AsRange().Select(_ => _cpu.CreateMatrix(inputHeight, inputWidth, (_, _) => Convert.ToSingle(normalDistribution.Sample()))).ToArray());
using var im2Col = cpuTensor.Im2Col(filterWidth, filterHeight, xStride, yStride);
using var cpuFilter = _cpu.CreateMatrix(depth * filterWidth * filterHeight, filterCount, (_, _) => normalDistribution.Sample());
Expand Down Expand Up @@ -793,7 +793,7 @@ public void Tensor3DToFloatTensor()
[Fact]
public void Tensor3DTransposeThisAndMultiply()
{
var normalDistribution = _context.CreateNormalDistribution();
var normalDistribution = _context.CreateNormalDistribution<float>();
var tensor1 = CheckCreateTensor(9, 3, 3, (_, _, _) => normalDistribution.Sample());
var data = 3.AsRange().Select(_ => CheckCreateTensor(3, 3, 3, (i, j, k) => (i + 1) * (j + 1) * (k + 1))).ToArray();

Expand All @@ -814,7 +814,7 @@ public void Tensor3DTransposeThisAndMultiply()
[Fact]
public void Tensor3DMultiply()
{
var normalDistribution = _context.CreateNormalDistribution();
var normalDistribution = _context.CreateNormalDistribution<float>();
var tensor1 = CheckCreateTensor(3, 9, 3, (_, _, _) => normalDistribution.Sample());
var data = 3.AsRange().Select(_ => CheckCreateTensor(3, 3, 3, (i, j, k) => (i + 1) * (j + 1) * (k + 1))).ToArray();

Expand Down Expand Up @@ -900,7 +900,7 @@ public void Tensor4DReverseIm2Col()
{
const int rows = 4, columns = 4, depth = 1, count = 1, filterWidth = 2, filterHeight = 2, filterCount = 1, xStride = 2, yStride = 2;

var normalDistribution = _context.CreateNormalDistribution(0, 1);
var normalDistribution = _context.CreateNormalDistribution<float>(0, 1);
var data = Enumerable.Range(0, count)
.Select(_ => CheckCreateTensor(rows, columns, depth, (_, _, _) => normalDistribution.Sample())).ToArray();
using var cpuTensor = _cpu.CreateTensor4D(data);
Expand Down
32 changes: 16 additions & 16 deletions BrightData.UnitTests/VectorTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ public void TestVectorCreation()

void TestDistances(DistanceMetric distanceMetric)
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);
var vectors = Enumerable.Range(0, 10).Select(_ => (IReadOnlyNumericSegment<float>)_cpu.CreateSegment(100, _ => distribution.Sample())).ToArray();
var compareTo = Enumerable.Range(0, 20).Select(_ => (IReadOnlyNumericSegment<float>)_cpu.CreateSegment(100, _ => distribution.Sample())).ToArray();

Expand Down Expand Up @@ -313,7 +313,7 @@ public void VectorMultiply()
[Fact]
public void VectorEuclideanDistance()
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);

using var a = _cpu.CreateVector(500, _ => distribution.Sample());
using var b = _cpu.CreateVector(500, _ => distribution.Sample());
Expand All @@ -340,7 +340,7 @@ public void VectorCosineDistance()
[Fact]
public void VectorManhattanDistance()
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);

using var a = _cpu.CreateVector(5000, _ => distribution.Sample());
using var b = _cpu.CreateVector(5000, _ => distribution.Sample());
Expand All @@ -354,7 +354,7 @@ public void VectorManhattanDistance()
[Fact]
public void VectorMeanSquaredDistance()
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);

using var a = _cpu.CreateVector(1000, _ => distribution.Sample());
using var b = _cpu.CreateVector(1000, _ => distribution.Sample());
Expand All @@ -367,7 +367,7 @@ public void VectorMeanSquaredDistance()
[Fact]
public void VectorSquaredEuclideanDistance()
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);

using var a = _cpu.CreateVector(1000, _ => distribution.Sample());
using var b = _cpu.CreateVector(1000, _ => distribution.Sample());
Expand All @@ -381,7 +381,7 @@ public void VectorSquaredEuclideanDistance()
[Fact]
public void VectorAverage()
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);

using var a = _cpu.CreateVector(5000, _ => distribution.Sample());
var cpu = a.Average();
Expand All @@ -393,7 +393,7 @@ public void VectorAverage()
[Fact]
public void VectorL1Norm()
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);

using var a = _cpu.CreateVector(5000, _ => distribution.Sample());
var cpu = a.L1Norm();
Expand All @@ -405,7 +405,7 @@ public void VectorL1Norm()
[Fact]
public void VectorAbs()
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);

using var a = _cpu.CreateVector(5000, _ => distribution.Sample());
var cpu = a.Abs();
Expand All @@ -417,7 +417,7 @@ public void VectorAbs()
[Fact]
public void VectorLog()
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);

using var a = _cpu.CreateVector(5000, _ => distribution.Sample());
var cpu = a.Log();
Expand All @@ -430,7 +430,7 @@ public void VectorLog()
[Fact]
public void VectorStdDev()
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);

using var a = _cpu.CreateVector(5000, _ => distribution.Sample());
var cpu = a.StdDev(null);
Expand Down Expand Up @@ -493,7 +493,7 @@ static IVector<float> TestMultiDistance(LinearAlgebraProvider<float> lap, IVecto
[Fact]
public void MultiEuclideanDistance()
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);

using var a = _cpu.CreateVector(5000, _ => distribution.Sample());
using var b = _cpu.CreateVector(5000, _ => distribution.Sample());
Expand All @@ -507,7 +507,7 @@ public void MultiEuclideanDistance()
[Fact]
public void MultiManhattanDistance()
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);

using var a = _cpu.CreateVector(5000, _ => distribution.Sample());
using var b = _cpu.CreateVector(5000, _ => distribution.Sample());
Expand All @@ -520,7 +520,7 @@ public void MultiManhattanDistance()
[Fact]
public void MultiCosineDistance()
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);

using var a = _cpu.CreateVector(5000, _ => distribution.Sample());
using var b = _cpu.CreateVector(5000, _ => distribution.Sample());
Expand Down Expand Up @@ -623,7 +623,7 @@ public void VectorSplit()
[Fact]
public void VectorSoftMax()
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);
using var a = _cpu.CreateVector(128, _ => distribution.Sample());
using var cpu = a.Softmax();

Expand All @@ -635,7 +635,7 @@ public void VectorSoftMax()
[Fact]
public void VectorSoftMaxDerivative()
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);
using var a = _cpu.CreateVector(128, _ => distribution.Sample());
using var cpu = a.SoftmaxDerivative();

Expand All @@ -650,7 +650,7 @@ public void VectorSoftMaxDerivative()
[Fact]
public void VectorReverse()
{
var distribution = _context.CreateNormalDistribution(0, 5);
var distribution = _context.CreateNormalDistribution<float>(0, 5);
var a = _cpu.CreateVector(128, _ => distribution.Sample());
var cpu = a.Reverse();

Expand Down
Loading

0 comments on commit 01c9957

Please sign in to comment.