
Commit 1e91427

Move the Tokenizer's data into separate packages. (dotnet#7248)
* Move the Tokenizer's data into separate packages.
* Address the feedback
* More feedback addressing
* More feedback addressing
* Trimming/AoT support
* Make data types internal
1 parent 189ba24 commit 1e91427

29 files changed: +729, -123 lines

Microsoft.ML.sln

Lines changed: 66 additions & 0 deletions
@@ -194,6 +194,18 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.GenAI.Mistral"
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.GenAI.Mistral.Tests", "test\Microsoft.ML.GenAI.Mistral.Tests\Microsoft.ML.GenAI.Mistral.Tests.csproj", "{49264202-C90A-43F6-8C30-BDAEF2F1465A}"
 EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.Tokenizers.Data.Cl100kBase", "src\Microsoft.ML.Tokenizers.Data.Cl100kBase\Microsoft.ML.Tokenizers.Data.Cl100kBase.csproj", "{14FB6EA7-A4A5-4491-AFBE-391AA27B8007}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.Tokenizers.Data.Gpt2", "src\Microsoft.ML.Tokenizers.Data.Gpt2\Microsoft.ML.Tokenizers.Data.Gpt2.csproj", "{BCAD9EEF-01A0-459A-80A2-5C950AF275B8}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.Tokenizers.Data.O200kBase", "src\Microsoft.ML.Tokenizers.Data.O200kBase\Microsoft.ML.Tokenizers.Data.O200kBase.csproj", "{D02DB243-5B96-4652-B172-35F18230434D}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.Tokenizers.Data.P50kBase", "src\Microsoft.ML.Tokenizers.Data.P50kBase\Microsoft.ML.Tokenizers.Data.P50kBase.csproj", "{FF2E2A95-E889-45C3-9205-8FDA7CD342BA}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.Tokenizers.Data.R50kBase", "src\Microsoft.ML.Tokenizers.Data.R50kBase\Microsoft.ML.Tokenizers.Data.R50kBase.csproj", "{E1AE4EF6-9DEE-4267-B37E-94A7B413754D}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.Tokenizers.Data.Tests", "test\Microsoft.ML.Tokenizers.Data.Tests\Microsoft.ML.Tokenizers.Data.Tests.csproj", "{2E6055A1-3FC1-418E-9B3E-9C6255649F42}"
+EndProject
 Global
     GlobalSection(SolutionConfigurationPlatforms) = preSolution
         Debug|Any CPU = Debug|Any CPU
@@ -918,6 +930,54 @@ Global
         {49264202-C90A-43F6-8C30-BDAEF2F1465A}.Release|Any CPU.Build.0 = Release|Any CPU
         {49264202-C90A-43F6-8C30-BDAEF2F1465A}.Release|x64.ActiveCfg = Release|Any CPU
         {49264202-C90A-43F6-8C30-BDAEF2F1465A}.Release|x64.Build.0 = Release|Any CPU
+        {14FB6EA7-A4A5-4491-AFBE-391AA27B8007}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+        {14FB6EA7-A4A5-4491-AFBE-391AA27B8007}.Debug|Any CPU.Build.0 = Debug|Any CPU
+        {14FB6EA7-A4A5-4491-AFBE-391AA27B8007}.Debug|x64.ActiveCfg = Debug|Any CPU
+        {14FB6EA7-A4A5-4491-AFBE-391AA27B8007}.Debug|x64.Build.0 = Debug|Any CPU
+        {14FB6EA7-A4A5-4491-AFBE-391AA27B8007}.Release|Any CPU.ActiveCfg = Release|Any CPU
+        {14FB6EA7-A4A5-4491-AFBE-391AA27B8007}.Release|Any CPU.Build.0 = Release|Any CPU
+        {14FB6EA7-A4A5-4491-AFBE-391AA27B8007}.Release|x64.ActiveCfg = Release|Any CPU
+        {14FB6EA7-A4A5-4491-AFBE-391AA27B8007}.Release|x64.Build.0 = Release|Any CPU
+        {BCAD9EEF-01A0-459A-80A2-5C950AF275B8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+        {BCAD9EEF-01A0-459A-80A2-5C950AF275B8}.Debug|Any CPU.Build.0 = Debug|Any CPU
+        {BCAD9EEF-01A0-459A-80A2-5C950AF275B8}.Debug|x64.ActiveCfg = Debug|Any CPU
+        {BCAD9EEF-01A0-459A-80A2-5C950AF275B8}.Debug|x64.Build.0 = Debug|Any CPU
+        {BCAD9EEF-01A0-459A-80A2-5C950AF275B8}.Release|Any CPU.ActiveCfg = Release|Any CPU
+        {BCAD9EEF-01A0-459A-80A2-5C950AF275B8}.Release|Any CPU.Build.0 = Release|Any CPU
+        {BCAD9EEF-01A0-459A-80A2-5C950AF275B8}.Release|x64.ActiveCfg = Release|Any CPU
+        {BCAD9EEF-01A0-459A-80A2-5C950AF275B8}.Release|x64.Build.0 = Release|Any CPU
+        {D02DB243-5B96-4652-B172-35F18230434D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+        {D02DB243-5B96-4652-B172-35F18230434D}.Debug|Any CPU.Build.0 = Debug|Any CPU
+        {D02DB243-5B96-4652-B172-35F18230434D}.Debug|x64.ActiveCfg = Debug|Any CPU
+        {D02DB243-5B96-4652-B172-35F18230434D}.Debug|x64.Build.0 = Debug|Any CPU
+        {D02DB243-5B96-4652-B172-35F18230434D}.Release|Any CPU.ActiveCfg = Release|Any CPU
+        {D02DB243-5B96-4652-B172-35F18230434D}.Release|Any CPU.Build.0 = Release|Any CPU
+        {D02DB243-5B96-4652-B172-35F18230434D}.Release|x64.ActiveCfg = Release|Any CPU
+        {D02DB243-5B96-4652-B172-35F18230434D}.Release|x64.Build.0 = Release|Any CPU
+        {FF2E2A95-E889-45C3-9205-8FDA7CD342BA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+        {FF2E2A95-E889-45C3-9205-8FDA7CD342BA}.Debug|Any CPU.Build.0 = Debug|Any CPU
+        {FF2E2A95-E889-45C3-9205-8FDA7CD342BA}.Debug|x64.ActiveCfg = Debug|Any CPU
+        {FF2E2A95-E889-45C3-9205-8FDA7CD342BA}.Debug|x64.Build.0 = Debug|Any CPU
+        {FF2E2A95-E889-45C3-9205-8FDA7CD342BA}.Release|Any CPU.ActiveCfg = Release|Any CPU
+        {FF2E2A95-E889-45C3-9205-8FDA7CD342BA}.Release|Any CPU.Build.0 = Release|Any CPU
+        {FF2E2A95-E889-45C3-9205-8FDA7CD342BA}.Release|x64.ActiveCfg = Release|Any CPU
+        {FF2E2A95-E889-45C3-9205-8FDA7CD342BA}.Release|x64.Build.0 = Release|Any CPU
+        {E1AE4EF6-9DEE-4267-B37E-94A7B413754D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+        {E1AE4EF6-9DEE-4267-B37E-94A7B413754D}.Debug|Any CPU.Build.0 = Debug|Any CPU
+        {E1AE4EF6-9DEE-4267-B37E-94A7B413754D}.Debug|x64.ActiveCfg = Debug|Any CPU
+        {E1AE4EF6-9DEE-4267-B37E-94A7B413754D}.Debug|x64.Build.0 = Debug|Any CPU
+        {E1AE4EF6-9DEE-4267-B37E-94A7B413754D}.Release|Any CPU.ActiveCfg = Release|Any CPU
+        {E1AE4EF6-9DEE-4267-B37E-94A7B413754D}.Release|Any CPU.Build.0 = Release|Any CPU
+        {E1AE4EF6-9DEE-4267-B37E-94A7B413754D}.Release|x64.ActiveCfg = Release|Any CPU
+        {E1AE4EF6-9DEE-4267-B37E-94A7B413754D}.Release|x64.Build.0 = Release|Any CPU
+        {2E6055A1-3FC1-418E-9B3E-9C6255649F42}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+        {2E6055A1-3FC1-418E-9B3E-9C6255649F42}.Debug|Any CPU.Build.0 = Debug|Any CPU
+        {2E6055A1-3FC1-418E-9B3E-9C6255649F42}.Debug|x64.ActiveCfg = Debug|Any CPU
+        {2E6055A1-3FC1-418E-9B3E-9C6255649F42}.Debug|x64.Build.0 = Debug|Any CPU
+        {2E6055A1-3FC1-418E-9B3E-9C6255649F42}.Release|Any CPU.ActiveCfg = Release|Any CPU
+        {2E6055A1-3FC1-418E-9B3E-9C6255649F42}.Release|Any CPU.Build.0 = Release|Any CPU
+        {2E6055A1-3FC1-418E-9B3E-9C6255649F42}.Release|x64.ActiveCfg = Release|Any CPU
+        {2E6055A1-3FC1-418E-9B3E-9C6255649F42}.Release|x64.Build.0 = Release|Any CPU
     EndGlobalSection
     GlobalSection(SolutionProperties) = preSolution
         HideSolutionNode = FALSE
@@ -1013,6 +1073,12 @@ Global
         {D202353D-6FAF-4263-9A01-BDCFBC92391F} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
         {2729CC66-7743-442B-B3A5-1F4F27F044A5} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
         {49264202-C90A-43F6-8C30-BDAEF2F1465A} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
+        {14FB6EA7-A4A5-4491-AFBE-391AA27B8007} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
+        {BCAD9EEF-01A0-459A-80A2-5C950AF275B8} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
+        {D02DB243-5B96-4652-B172-35F18230434D} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
+        {FF2E2A95-E889-45C3-9205-8FDA7CD342BA} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
+        {E1AE4EF6-9DEE-4267-B37E-94A7B413754D} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
+        {2E6055A1-3FC1-418E-9B3E-9C6255649F42} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
     EndGlobalSection
     GlobalSection(ExtensibilityGlobals) = postSolution
         SolutionGuid = {41165AF1-35BB-4832-A189-73060F82B01D}

eng/TokenizerData.targets

Lines changed: 88 additions & 0 deletions
@@ -0,0 +1,88 @@
<Project>
  <UsingTask TaskName="CompressFile"
             TaskFactory="RoslynCodeTaskFactory"
             AssemblyFile="$(MSBuildToolsPath)\Microsoft.Build.Tasks.Core.dll" >
    <ParameterGroup>
      <Files ParameterType="Microsoft.Build.Framework.ITaskItem[]" Required="true" />
    </ParameterGroup>
    <Task>
      <Using Namespace="System.Globalization" />
      <Using Namespace="System.IO" />
      <Using Namespace="System.IO.Compression" />
      <Code Type="Fragment" Language="cs">
        <![CDATA[
          foreach (var file in Files)
          {
              string fileName = file.GetMetadata("FullPath");
              string fileContent = File.ReadAllText(fileName);
              int capacity = 1;
              int eolIndex = 0;
              do
              {
                  if ((eolIndex = fileContent.IndexOf('\n', eolIndex)) >= 0)
                  {
                      eolIndex++;
                      capacity++;
                  }
                  else
                  {
                      break;
                  }
              } while (eolIndex < fileContent.Length);

              using var sourceStream = File.OpenRead(fileName);
              using var reader = new StreamReader(sourceStream);
              using var destStream = new DeflateStream(File.Create(file.GetMetadata("Destination")), CompressionLevel.Optimal);
              using var streamWriter = new StreamWriter(destStream);

              streamWriter.WriteLine($"Capacity: {capacity.ToString(CultureInfo.InvariantCulture)}");

              string line;
              int destLineNumber = 0;

              while ((line = reader.ReadLine()) != null)
              {
                  if (line.Length == 0) { continue; }
                  int index = line.IndexOf(' ');

                  if (index <= 0 || index == line.Length - 1 || !int.TryParse(line.Substring(index + 1), out int id) || id < destLineNumber)
                  {
                      Log.LogError($"Invalid format in the file {file.GetMetadata("FullPath")} line {line}");
                      break;
                  }

                  while (destLineNumber < id)
                  {
                      // ensure id always aligns with the line number
                      streamWriter.WriteLine();
                      destLineNumber++;
                  }

                  streamWriter.WriteLine(line.Substring(0, index));
                  destLineNumber++;
              }
          }
        ]]>
      </Code>
    </Task>
  </UsingTask>

  <Target Name="CompressTiktokenData"
          BeforeTargets="AssignTargetPaths"
          DependsOnTargets="_EnsureTokenizerDataEmbeddedResourceDestination"
          Inputs="@(TokenizerDataEmbeddedResource)"
          Outputs="@(TokenizerDataEmbeddedResource->'%(Destination)')">

    <CompressFile Files="@(TokenizerDataEmbeddedResource)" />

    <ItemGroup>
      <EmbeddedResource Include="@(TokenizerDataEmbeddedResource->'%(Destination)')" LogicalName="%(FileName)%(Extension).deflate" />
    </ItemGroup>
  </Target>

  <Target Name="_EnsureTokenizerDataEmbeddedResourceDestination" >
    <ItemGroup>
      <TokenizerDataEmbeddedResource Condition="'%(TokenizerDataEmbeddedResource.Destination)' == ''" Destination="$(IntermediateOutputPath)%(FileName).deflate" />
    </ItemGroup>
  </Target>
</Project>
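
The CompressFile task above strips the explicit ranks from a .tiktoken vocabulary (the rank is recoverable as the line number), deflate-compresses the result, and embeds it under the logical name `%(FileName)%(Extension).deflate`, which for the Cl100kBase package should come out as something like `cl100k_base.tiktoken.deflate`. As a rough illustration of what consuming that format involves, here is a minimal C# sketch that decompresses such a resource and rebuilds a token-to-rank map; the `TiktokenResourceReader` type, its `Read` method, and the manifest-resource lookup are illustrative assumptions, not the actual loader inside Microsoft.ML.Tokenizers:

```csharp
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Reflection;

internal static class TiktokenResourceReader
{
    // Illustrative helper (not the library's real loader): reads a rank-eliminated,
    // deflate-compressed .tiktoken resource back into a (base64 token -> rank) map.
    // The rank of each token is its zero-based line number in the decompressed payload;
    // blank lines stand in for ranks that were missing from the original file.
    internal static Dictionary<string, int> Read(Assembly assembly, string resourceName)
    {
        using Stream compressed = assembly.GetManifestResourceStream(resourceName)
            ?? throw new InvalidOperationException($"Resource '{resourceName}' not found.");
        using var deflate = new DeflateStream(compressed, CompressionMode.Decompress);
        using var reader = new StreamReader(deflate);

        // The first line is "Capacity: <line count>", written by the CompressFile task.
        string? header = reader.ReadLine();
        int capacity = header != null && header.StartsWith("Capacity: ", StringComparison.Ordinal)
            ? int.Parse(header.Substring("Capacity: ".Length))
            : 0;

        var ranks = new Dictionary<string, int>(capacity);
        string? line;
        int rank = 0;
        while ((line = reader.ReadLine()) != null)
        {
            if (line.Length > 0)
            {
                ranks[line] = rank; // base64-encoded token bytes -> rank == line number
            }
            rank++;
        }

        return ranks;
    }
}
```

The blank lines written for missing ranks are skipped here but still advance the rank counter, which is exactly why the task inserts them: the rank of every kept token stays equal to its line number.
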

src/Microsoft.ML.Tokenizers.Data.Cl100kBase/Cl100kBaseTokenizerData.cs

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

namespace Microsoft.ML.Tokenizers
{
    /// <summary>
    /// Cl100kBaseTokenizerData is used internally by the Microsoft.ML.Tokenizers library to bind to the cl100k_base.tiktoken data file.
    /// </summary>
    internal sealed class Cl100kBaseTokenizerData
    {
    }
}

src/Microsoft.ML.Tokenizers.Data.Cl100kBase/Microsoft.ML.Tokenizers.Data.Cl100kBase.csproj

Lines changed: 31 additions & 0 deletions
@@ -0,0 +1,31 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>netstandard2.0</TargetFramework>
    <Nullable>enable</Nullable>
    <IsPackable>true</IsPackable>
    <PackageDescription>The Microsoft.ML.Tokenizers.Data.Cl100kBase package includes the Tiktoken tokenizer data file cl100k_base.tiktoken, which is utilized by models such as GPT-4.</PackageDescription>
  </PropertyGroup>

  <ItemGroup>
    <!--
      The following file is compressed using DeflateStream and embedded as a resource in the assembly.
      The file is downloaded from the following source and compressed to the Destination:
        - cl100k_base.tiktoken: https://openaipublic.blob.core.windows.net/encodings/cl100k_base.tiktoken

      The file is under the MIT license: https://github.com/openai/tiktoken/blob/main/LICENSE

      In the CompressFile task (defined in eng/TokenizerData.targets) we modify the file's content to eliminate the ranks, thus reducing the file size,
      since the rank corresponds to the line number in the file. For files with gaps in the ranks, such as p50k_base.tiktoken,
      we introduce empty lines to replace any missing ranks, ensuring that the rank consistently aligns with the line number.
      After eliminating the ranks, we compress the file using DeflateStream and embed it as a resource in the assembly.
    -->
    <TokenizerDataEmbeddedResource Include="Data\cl100k_base.tiktoken" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\Microsoft.ML.Tokenizers\Microsoft.ML.Tokenizers.csproj" />
  </ItemGroup>

  <Import Project="$(RepositoryEngineeringDir)TokenizerData.targets" />
</Project>

src/Microsoft.ML.Tokenizers.Data.Cl100kBase/PACKAGE.md

Lines changed: 47 additions & 0 deletions
@@ -0,0 +1,47 @@
## About

The `Microsoft.ML.Tokenizers.Data.Cl100kBase` package includes the Tiktoken tokenizer data file `cl100k_base.tiktoken`, which is utilized by models such as GPT-4.

## Key Features

* This package mainly contains the cl100k_base.tiktoken file, which is used by the Tiktoken tokenizer. This data file is used by the following models:
  1. gpt-4
  2. gpt-3.5-turbo
  3. gpt-3.5-turbo-16k
  4. gpt-35
  5. gpt-35-turbo
  6. gpt-35-turbo-16k
  7. text-embedding-ada-002
  8. text-embedding-3-small
  9. text-embedding-3-large

## How to Use

Reference this package in your project to use the Tiktoken tokenizer with the models listed above.

```csharp
// Create a tokenizer for the specified model or any other listed model name
Tokenizer tokenizer = TiktokenTokenizer.CreateForModel("gpt-4");

// Or create a tokenizer for the specified encoding
Tokenizer cl100kTokenizer = TiktokenTokenizer.CreateForEncoding("cl100k_base");
```

## Main Types

Users shouldn't use any types exposed by this package directly. This package is intended to provide tokenizer data files.

## Additional Documentation

* [API documentation](https://learn.microsoft.com/en-us/dotnet/api/microsoft.ml.tokenizers)

## Related Packages

<!-- The related packages associated with this package -->
Microsoft.ML.Tokenizers

## Feedback & Contributing

Microsoft.ML.Tokenizers.Data.Cl100kBase is released as open source under the [MIT license](https://licenses.nuget.org/MIT). Bug reports and contributions are welcome at [the GitHub repository](https://github.com/dotnet/machinelearning).
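
As context for the snippet in the PACKAGE.md above, here is a slightly fuller usage sketch. `EncodeToIds`, `CountTokens`, and `Decode` are members of the `Tokenizer` base class; the comment about what happens without the data package is an assumption based on this change rather than documented behavior, and the exact token ids depend on the cl100k_base vocabulary:

```csharp
using System;
using System.Collections.Generic;
using Microsoft.ML.Tokenizers;

class TokenizerSample
{
    static void Main()
    {
        // Requires both Microsoft.ML.Tokenizers and Microsoft.ML.Tokenizers.Data.Cl100kBase
        // to be referenced; without the data package the cl100k_base vocabulary is not
        // available and creating the tokenizer is expected to fail.
        Tokenizer tokenizer = TiktokenTokenizer.CreateForModel("gpt-4");

        string text = "Hello, World!";

        IReadOnlyList<int> ids = tokenizer.EncodeToIds(text); // token ids for the text
        int count = tokenizer.CountTokens(text);              // same count as ids.Count
        string? roundTripped = tokenizer.Decode(ids);         // back to the original text

        Console.WriteLine($"{count} tokens: {string.Join(", ", ids)}");
        Console.WriteLine(roundTripped);
    }
}
```

This split is presumably the point of shipping the data in separate Microsoft.ML.Tokenizers.Data.* packages: an application only embeds the vocabularies it actually references.
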

src/Microsoft.ML.Tokenizers.Data.Gpt2/Gpt2TokenizerData.cs

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

namespace Microsoft.ML.Tokenizers
{
    /// <summary>
    /// Gpt2TokenizerData is used internally by the Microsoft.ML.Tokenizers library to bind to the gpt2.tiktoken data file.
    /// </summary>
    internal sealed class Gpt2TokenizerData
    {
    }
}

src/Microsoft.ML.Tokenizers.Data.Gpt2/Microsoft.ML.Tokenizers.Data.Gpt2.csproj

Lines changed: 31 additions & 0 deletions
@@ -0,0 +1,31 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>netstandard2.0</TargetFramework>
    <Nullable>enable</Nullable>
    <IsPackable>true</IsPackable>
    <PackageDescription>The Microsoft.ML.Tokenizers.Data.Gpt2 package includes the Tiktoken tokenizer data file gpt2.tiktoken, which is utilized by models such as Gpt-2.</PackageDescription>
  </PropertyGroup>

  <ItemGroup>
    <!--
      The following file is compressed using DeflateStream and embedded as a resource in the assembly.
      The file is downloaded from the following source and compressed to the Destination:
        - gpt2.tiktoken: https://fossies.org/linux/misc/whisper-20231117.tar.gz/whisper-20231117/whisper/assets/gpt2.tiktoken?m=b

      The file is under the MIT license: https://github.com/openai/tiktoken/blob/main/LICENSE

      In the CompressFile task (defined in eng/TokenizerData.targets) we modify the file's content to eliminate the ranks, thus reducing the file size,
      since the rank corresponds to the line number in the file. For files with gaps in the ranks, such as p50k_base.tiktoken,
      we introduce empty lines to replace any missing ranks, ensuring that the rank consistently aligns with the line number.
      After eliminating the ranks, we compress the file using DeflateStream and embed it as a resource in the assembly.
    -->
    <TokenizerDataEmbeddedResource Include="Data\gpt2.tiktoken" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\Microsoft.ML.Tokenizers\Microsoft.ML.Tokenizers.csproj" />
  </ItemGroup>

  <Import Project="$(RepositoryEngineeringDir)TokenizerData.targets" />
</Project>

src/Microsoft.ML.Tokenizers.Data.Gpt2/PACKAGE.md

Lines changed: 35 additions & 0 deletions
@@ -0,0 +1,35 @@
## About

The `Microsoft.ML.Tokenizers.Data.Gpt2` package includes the Tiktoken tokenizer data file gpt2.tiktoken, which is utilized by models such as `Gpt-2`.

## Key Features

* This package mainly contains the gpt2.tiktoken file, which is used by the Tiktoken tokenizer. This data file is used by the Gpt-2 model.

## How to Use

Reference this package in your project to use the Tiktoken tokenizer with the specified model.

```csharp
// Create a tokenizer for the specified model
Tokenizer tokenizer = TiktokenTokenizer.CreateForModel("Gpt-2");
```

## Main Types

Users shouldn't use any types exposed by this package directly. This package is intended to provide tokenizer data files.

## Additional Documentation

* [API documentation](https://learn.microsoft.com/en-us/dotnet/api/microsoft.ml.tokenizers)

## Related Packages

<!-- The related packages associated with this package -->
Microsoft.ML.Tokenizers

## Feedback & Contributing

Microsoft.ML.Tokenizers.Data.Gpt2 is released as open source under the [MIT license](https://licenses.nuget.org/MIT). Bug reports and contributions are welcome at [the GitHub repository](https://github.com/dotnet/machinelearning).
