Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions .editorconfig
Original file line number Diff line number Diff line change
Expand Up @@ -379,3 +379,13 @@ dotnet_naming_style.s_camelcase.required_prefix = s_
dotnet_naming_style.s_camelcase.required_suffix =
dotnet_naming_style.s_camelcase.word_separator =
dotnet_naming_style.s_camelcase.capitalization = camel_case


# Docs
dotnet_diagnostic.CS1591.severity = suggestion

# Cleanup

dotnet_diagnostic.IDE0001.severity = warning
dotnet_diagnostic.IDE0004.severity = warning
dotnet_diagnostic.IDE0005.severity = warning
7 changes: 7 additions & 0 deletions Directory.Build.props
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
<Project>
<PropertyGroup>
<EnforceCodeStyleInBuild>true</EnforceCodeStyleInBuild>
<GenerateDocumentationFile>true</GenerateDocumentationFile>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
</Project>
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
using Example.SentimentInference.Model;
using FAI.Core;
using FAI.Core.Abstractions;
using Microsoft.AspNetCore.Mvc;
using Scalar.AspNetCore;
Expand Down
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
# FAI - Fast AI on A Budget

[![Build Status](https://github.com/tjwald/FAI/actions/workflows/pr-check.yml/badge.svg)](https://github.com/tjwald/FAI/actions/workflows/pr-check.yml)
[![Build Status](https://github.com/tjwald/FAI/actions/workflows/publish-nuget.yml/badge.svg)](https://github.com/tjwald/FAI/actions/workflows/publish-nuget.yml)
[![NuGet](https://img.shields.io/nuget/v/FAI.Core.svg)](https://www.nuget.org/packages/FAI.Core)
[![License](https://img.shields.io/github/license/tjwald/FAI)](https://github.com/tjwald/FAI/blob/develop/LICENSE)
Expand Down
5 changes: 5 additions & 0 deletions global.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
{
"test": {
"runner": "Microsoft.Testing.Platform"
}
}
2 changes: 1 addition & 1 deletion src/FAI.Core.Extensions.DI/BuilderExtensions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ public static IServiceCollection AddConfigurationAndBind<TConfiguration>(this IS
services.AddOptionsWithValidateOnStart<TConfiguration>()
.BindConfiguration(section)
.ValidateDataAnnotations();
services.AddSingleton<TConfiguration>(sp => sp.GetRequiredService<IOptions<TConfiguration>>().Value);
services.AddSingleton(sp => sp.GetRequiredService<IOptions<TConfiguration>>().Value);

return services;
}
Expand Down
4 changes: 2 additions & 2 deletions src/FAI.Core.Extensions.DI/FAIBuilderExtensions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ public PipelineBuilder<TInput, TOutput> AddPipeline<TInput, TOutput>()
{
var pipelineBuilder = new PipelineBuilder<TInput, TOutput>(services);

services.AddSingleton<IPipeline<TInput, TOutput>>(sp => pipelineBuilder.Build(sp));
services.AddSingleton(sp => pipelineBuilder.Build(sp));

return pipelineBuilder;
}
Expand All @@ -25,7 +25,7 @@ public PipelineBuilder<TInput, TOutput> UsePartitioning(Action<PartitionBatchExe
{
var partitionBuilder = new PartitionBatchExecutorBuilder<TInput, TOutput>(builder.ServiceCollection);
configure(partitionBuilder);
return builder.Use<PartitionPipelineBatchExecutor<TInput, TOutput>>(
return builder.Use(
(next, sp) => new PartitionPipelineBatchExecutor<TInput, TOutput>(partitionBuilder.BuildSchedular(sp), partitionBuilder.BuildSlicer(sp), next));
}
}
Expand Down
2 changes: 1 addition & 1 deletion src/FAI.Core.Extensions.DI/LocalServiceCollection.cs
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ private TService ServiceFactory<TService>(IServiceProvider sp, object? o) where

if (descriptor.ImplementationType is not null)
{
return (TService)ActivatorUtilities.CreateInstance(_serviceProvider, descriptor.ImplementationType)!;
return (TService)ActivatorUtilities.CreateInstance(_serviceProvider, descriptor.ImplementationType);
}

if (descriptor.ImplementationFactory is not null)
Expand Down
6 changes: 3 additions & 3 deletions src/FAI.Core.Extensions.DI/PipelineBuilder.cs
Original file line number Diff line number Diff line change
Expand Up @@ -41,18 +41,18 @@ public PipelineBuilder<TInput, TOutput> AddInferenceSteps<TInferenceSteps>()
return this;
}

public PipelineBuilder<TInput, TOutput> UsePipeline<TPipeline>(Func<IServiceProvider, IPipelineBatchExecutor<TInput, TOutput>, IPipeline<TInput, TOutput>> factory) where TPipeline : IPipeline<TInput, TOutput>
public PipelineBuilder<TInput, TOutput> UsePipeline(Func<IServiceProvider, IPipelineBatchExecutor<TInput, TOutput>, IPipeline<TInput, TOutput>> factory)
{
_pipelineFactory = factory;
return this;
}

public PipelineBuilder<TInput, TOutput> Use<TBatchExecutor>() where TBatchExecutor : IPipelineBatchExecutor<TInput, TOutput>
{
return this.Use<TBatchExecutor>((next, sp) => ActivatorUtilities.CreateInstance<TBatchExecutor>(sp, next));
return Use((next, sp) => ActivatorUtilities.CreateInstance<TBatchExecutor>(sp, next));
}

public PipelineBuilder<TInput, TOutput> Use<TBatchExecutor>(Func<IPipelineBatchExecutor<TInput, TOutput>, IServiceProvider, IPipelineBatchExecutor<TInput, TOutput>> factory) where TBatchExecutor : IPipelineBatchExecutor<TInput, TOutput>
public PipelineBuilder<TInput, TOutput> Use(Func<IPipelineBatchExecutor<TInput, TOutput>, IServiceProvider, IPipelineBatchExecutor<TInput, TOutput>> factory)
{
_batchExecutorFactories.Add(factory);
return this;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ public static ClassificationResult<TClassification, TScore> GetClassificationRes
{
Span<TScore> probabilities = stackalloc TScore[logits.Length];
TensorPrimitives.SoftMax(logits, probabilities);
int argmax = TensorPrimitives.IndexOfMax<TScore>(probabilities);
int argmax = TensorPrimitives.IndexOfMax(probabilities);
var score = probabilities[argmax];

TScore[]? logitsArray = classificationOptions.StoreLogits ? logits.ToArray() : null;
Expand Down
2 changes: 1 addition & 1 deletion src/FAI.NLP.Extensions.DI/BatchExecutorExtensions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ public static class BatchExecutorExtensions
public PipelineBuilder<TInput, TOutput> UseTokenSorting(TokenCountSortingBatchExecutorOptions? options = null)
{
options ??= new();
return builder.Use<TokenCountSortingBatchExecutor<TInput, TOutput>>((next, sp)
return builder.Use((next, sp)
=> ActivatorUtilities.CreateInstance<TokenCountSortingBatchExecutor<TInput, TOutput>>(sp, next, options));
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ public override BatchTokenizedResult Preprocess(ReadOnlySpan<TextMultipleChoiceI
var tokenRow = tokenTensorSpan[outputRow].AsSpan();
var maskRow = maskTensorSpan[outputRow].AsSpan();

TensorPrimitives.ConvertChecked<int, long>(CollectionsMarshal.AsSpan(tokens), tokenRow);
TensorPrimitives.ConvertChecked(CollectionsMarshal.AsSpan(tokens), tokenRow);

maskRow[..tokens.Count].Fill(1);
outputRow++;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,8 @@ public async Task ExecuteBatchPredict(ReadOnlyMemory<TToken> inputs, Memory<TOut

var tokenComparer = new TokenCountComparer<TToken>(_options.Ascending);

MemoryExtensions.Sort<TToken, int, TokenCountComparer<TToken>>(inputsSorted, inputsSortedIndices, tokenComparer);
MemoryExtensions.Sort(inputsSorted, inputsSortedIndices, tokenComparer);
await _executor.ExecuteBatchPredict(inputsSorted, outputSpan);
MemoryExtensions.Sort<int, TOutput>(inputsSortedIndices, outputSpan.Span);
MemoryExtensions.Sort(inputsSortedIndices, outputSpan.Span);
}
}
4 changes: 2 additions & 2 deletions src/FAI.NLP/Tokenization/PretrainedTokenizer.cs
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ IEnumerator IEnumerable.GetEnumerator()

/// <summary>
/// Represents a pretrained tokenizer used for tokenizing text inputs and managing token-related transformations.
/// Wraps a <see cref="Microsoft.ML.Tokenizers.Tokenizer"/> and adds batch functionality.
/// Wraps a <see cref="Tokenizer"/> and adds batch functionality.
/// </summary>
public sealed class PretrainedTokenizer
{
Expand Down Expand Up @@ -208,7 +208,7 @@ private static void TokenizeRow(PretrainedTokenizerOptions tokenizerOptions, Lis
Span<long> tokenizationRowSpan = tokenizationSpan[i].AsSpan();
Span<long> maskRowSpan = maskSpan[i].AsSpan();

TensorPrimitives.ConvertChecked<int, long>(tokenizedInput, tokenizationRowSpan);
TensorPrimitives.ConvertChecked(tokenizedInput, tokenizationRowSpan);

if (tokenizerOptions.PaddingToken != 0) // No need - initialized to 0
{
Expand Down
2 changes: 1 addition & 1 deletion src/FAI.Onnx/FAI.Onnx.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
</ItemGroup>

<ItemGroup>
<PackageReference Include="Microsoft.ML.OnnxRuntime.Managed" Version="1.24.1" />
<PackageReference Include="Microsoft.ML.OnnxRuntime.Managed" Version="1.23.2" />
<ProjectReference Include="..\FAI.Core\FAI.Core.csproj" />
</ItemGroup>

Expand Down
2 changes: 1 addition & 1 deletion test/Directory.Build.props
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
</PropertyGroup>

<ItemGroup>
<PackageReference Include="xunit.v3" Version="1.1.0" />
<PackageReference Include="xunit.v3" Version="3.2.2" />
<PackageReference Include="NSubstitute" Version="5.3.0" />
</ItemGroup>

Expand Down
7 changes: 3 additions & 4 deletions test/FAI.Core.Extensions.DI.Tests/PipelineBuilderTests.cs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
using FAI.Core.Abstractions;
using FAI.Core.PipelineBatchExecutors;
using Microsoft.Extensions.DependencyInjection;

namespace FAI.Core.Extensions.DI.Tests;
Expand Down Expand Up @@ -30,8 +29,8 @@ public async Task Build_MaintainsExpectedChainOrder()
_services.AddSingleton(tracker);

var builder = new PipelineBuilder<string, int>(_services);
builder.Use<OrderTrackingExecutor>((next, sp) => new OrderTrackingExecutor(next, "First", sp.GetRequiredService<List<string>>()));
builder.Use<OrderTrackingExecutor>((next, sp) => new OrderTrackingExecutor(next, "Second", sp.GetRequiredService<List<string>>()));
builder.Use((next, sp) => new OrderTrackingExecutor(next, "First", sp.GetRequiredService<List<string>>()));
builder.Use((next, sp) => new OrderTrackingExecutor(next, "Second", sp.GetRequiredService<List<string>>()));
builder.UseSink<OrderTrackingSink>(sp => new OrderTrackingSink("Sink", sp.GetRequiredService<List<string>>()));

var sp = _services.BuildServiceProvider();
Expand Down Expand Up @@ -75,7 +74,7 @@ public void AddModelExecutor_RegistersFactory()
var mockExecutor = Substitute.For<IModelExecutor<string, int>>();

// Act
builder.AddModelExecutor<string, int>(_ => mockExecutor);
builder.AddModelExecutor(_ => mockExecutor);

// Assert
var sp = _services.BuildServiceProvider();
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
using FAI.Core.Abstractions;
using FAI.Core.BatchSchedulers;
using FAI.Core.Configurations.PipelineBatchExecutors;
using NSubstitute;

namespace FAI.Core.Tests.BatchSchedularTests;

Expand Down Expand Up @@ -35,7 +34,7 @@ public async Task RunInExecutor_RespectsConcurrencyLimit()

int activeTasks = 0;
int maxSeenActiveTasks = 0;
var lockObj = new System.Threading.Lock();
var lockObj = new Lock();

executor.ExecuteBatchPredict(Arg.Any<ReadOnlyMemory<int>>(), Arg.Any<Memory<int>>())
.Returns(async _ =>
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
using FAI.Core.Abstractions;
using FAI.Core.BatchSchedulers;
using NSubstitute;

namespace FAI.Core.Tests.BatchSchedularTests;

Expand Down
4 changes: 1 addition & 3 deletions test/FAI.Core.Tests/ClassificationTaskTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,6 @@
using FAI.Core.Abstractions;
using FAI.Core.Configurations.InferenceTasks;
using FAI.Core.InferenceTasks.Classification;
using FAI.Core.ResultTypes;
using NSubstitute;

namespace FAI.Core.Tests;

Expand Down Expand Up @@ -65,7 +63,7 @@ public void GetClassificationResult_ReturnsHighestProbability()
float[] logits = [1.0f, 5.0f, 2.0f];

// Act
var result = options.GetClassificationResult<string, float>(logits);
var result = options.GetClassificationResult(logits);

// Assert
Assert.Equal("B", result.Choice);
Expand Down
3 changes: 1 addition & 2 deletions test/FAI.Core.Tests/InferenceStepsTests.cs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
using FAI.Core.Abstractions;
using NSubstitute;

namespace FAI.Core.Tests;

Expand All @@ -11,7 +10,7 @@ private class TestInferenceSteps : InferenceSteps<string, int, double, string>

public override Task<double> RunModel(ReadOnlyMemory<string> input, int preprocesses)
{
return Task.FromResult((double)preprocesses * 2.0);
return Task.FromResult(preprocesses * 2.0);
}

public override void PostProcess(ReadOnlySpan<string> inputs, int preprocesses, double modelOutput, Span<string> outputs)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
using FAI.Core.Abstractions;
using FAI.Core.PipelineBatchExecutors;
using NSubstitute;

namespace FAI.Core.Tests.PipelineBatchExecutorTests;

Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
using FAI.Core.Abstractions;
using FAI.Core.PipelineBatchExecutors;
using NSubstitute;

namespace FAI.Core.Tests.PipelineBatchExecutorTests;

Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
using System.Buffers;
using FAI.Core.Abstractions;
using FAI.Core.PipelineBatchExecutors;
using NSubstitute;

namespace FAI.Core.Tests.PipelineBatchExecutorTests;

Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
using FAI.Core.Abstractions;
using FAI.Core.PipelineBatchExecutors;
using NSubstitute;

namespace FAI.Core.Tests.PipelineBatchExecutorTests;

Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
using FAI.Core.Abstractions;
using FAI.Core.PipelineBatchExecutors;
using NSubstitute;

namespace FAI.Core.Tests.PipelineBatchExecutorTests;

Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
using FAI.Core.Abstractions;
using FAI.Core.PipelineBatchExecutors;
using Microsoft.Extensions.Logging.Abstractions;
using NSubstitute;

namespace FAI.Core.Tests.PipelineBatchExecutorTests;

Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
using FAI.Core.Abstractions;
using FAI.Core.Pipelines;
using Microsoft.Extensions.Logging.Abstractions;
using NSubstitute;

namespace FAI.Core.Tests.PipelineTests;

Expand Down
3 changes: 1 addition & 2 deletions test/FAI.Core.Tests/TensorExtensionsTests.cs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
using System.Numerics.Tensors;
using FAI.Core;

namespace FAI.Core.Tests;

Expand Down Expand Up @@ -48,7 +47,7 @@ public void AsMemory_Tensor_ReturnsCorrectMemory()
{
// Arrange
float[] data = [1.0f, 2.0f, 3.0f];
var tensor = Tensor.Create<float>(data, [3]);
var tensor = Tensor.Create(data, [3]);

// Act
var memory = tensor.AsMemory();
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
using FAI.Core.Abstractions;
using Microsoft.Extensions.Logging.Abstractions;
using NSubstitute;

namespace FAI.Extensions.Evaluation.Tests;

Expand Down
4 changes: 0 additions & 4 deletions test/FAI.IntegrationTests/GlobalUsings.cs
Original file line number Diff line number Diff line change
@@ -1,20 +1,16 @@
global using System.Numerics.Tensors;
global using System.Runtime.CompilerServices;
global using FAI.Core.Abstractions;
global using FAI.Core.BatchSchedulers;
global using FAI.Core.Configurations.InferenceTasks;
global using FAI.Core.Configurations.PipelineBatchExecutors;
global using FAI.Core.Extensions.DI;
global using FAI.Core.PipelineBatchExecutors;
global using FAI.Core.Pipelines;
global using FAI.Core.ResultTypes;
global using FAI.NLP.Configuration;
global using FAI.NLP.InferenceTasks.TextClassification;
global using FAI.NLP.InferenceTasks.TextMultipleChoice;
global using FAI.NLP.Tests.Mocks;
global using FAI.NLP.Tokenization;
global using FAI.Onnx.ModelExecutors;
global using FAI.Onnx.Tests.Utils;
global using FluentAssertions;
global using Microsoft.Extensions.DependencyInjection;
global using Xunit;
4 changes: 2 additions & 2 deletions test/FAI.IntegrationTests/LogicalMockModelExecutor.cs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ public LogicalMockModelExecutor(float[][] outputs)
public Task<Tensor<float>[]> RunAsync(Tensor<long>[] inputs)
{
var data = _outputs[_callCount % _outputs.Length];
var output = Tensor.Create<float>(data, [(nint)data.Length]);
var output = Tensor.Create(data, [data.Length]);
_callCount++;
return Task.FromResult(new[] { output });
}
Expand All @@ -31,7 +31,7 @@ public Task RunAsync(Tensor<long>[] inputs, Action<ReadOnlyTensorSpan<float>, in
row.AsSpan().CopyTo(batchOutput.AsSpan(i * outputSize));
}

var batchTensor = Tensor.Create<float>(batchOutput, [(nint)batchSize, (nint)outputSize]);
var batchTensor = Tensor.Create(batchOutput, [batchSize, outputSize]);
postProcess(batchTensor, 0); // Assuming model has 1 output tensor

return Task.CompletedTask;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ public async Task FullPipeline_ShouldHandleMultipleChoice()
.Use<TokenizerBatchExecutor<TextMultipleChoiceInput, ChoiceResult<TokenizedText>>>();

var tokenizer = DummyTokenizerFactory.Create();
services.AddSingleton<PretrainedTokenizer>(tokenizer);
services.AddSingleton(tokenizer);
services.AddSingleton(new TextMultipleChoiceOptions { MaxChoices = 2 });

// Mock model: always returns [0.9, 0.1] logits
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ public async Task ComplexPipeline_WithBackgroundAndPartitioning_ShouldProcessBat

// Setup dependencies
var tokenizer = DummyTokenizerFactory.Create();
services.AddSingleton<PretrainedTokenizer>(tokenizer);
services.AddSingleton(tokenizer);
services.AddSingleton<IBatchSchedular<TokenizedText, ClassificationResult<bool, float>>, ParallelBatchSchedular<TokenizedText, ClassificationResult<bool, float>>>();
services.AddSingleton<IBatchSlicer<TokenizedText>, FixedSizeBatchSlicer<TokenizedText>>();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ public async Task FullPipeline_ShouldClassifyText()
.Use<TokenizerBatchExecutor<TokenizedText, ClassificationResult<bool, float>>>();

var tokenizer = DummyTokenizerFactory.Create();
services.AddSingleton<PretrainedTokenizer>(tokenizer);
services.AddSingleton(tokenizer);

// Mock model: always returns high probability for 'true' (index 1)
services.AddSingleton<IModelExecutor<long, float>>(new LogicalMockModelExecutor([[0.1f, 0.9f]]));
Expand Down
Loading