Skip to content

Commit 078d9c3

Browse files
authored
Merge pull request #89 from sdcb/dev
Dev
2 parents b93f4a9 + 2f2d0e0 commit 078d9c3

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

42 files changed

+629
-130
lines changed

src/BE/Chats.BE.csproj

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
<Nullable>enable</Nullable>
66
<ImplicitUsings>enable</ImplicitUsings>
77
<UserSecretsId>d4aa34e2-6c5f-41b9-b61b-c1a48b1d1b44</UserSecretsId>
8-
<Version>1.2.0</Version>
8+
<Version>1.3.0</Version>
99
</PropertyGroup>
1010

1111
<ItemGroup>

src/BE/Controllers/Chats/Files/FileController.cs

Lines changed: 57 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
using Chats.BE.Infrastructure.Functional;
1212
using Chats.BE.Controllers.Chats.Messages.Dtos;
1313
using Microsoft.Net.Http.Headers;
14+
using Chats.BE.Controllers.Common.Dtos;
1415

1516
namespace Chats.BE.Controllers.Chats.Files;
1617

@@ -54,12 +55,12 @@ public async Task<ActionResult<FileDto>> Upload(int fileServiceId, IFormFile fil
5455
}
5556

5657
private async Task<ActionResult<FileDto>> UploadPrivate(ChatsDB db, IFormFile file,
57-
FileServiceFactory fileServiceFactory,
58-
ILogger<FileController> logger,
59-
ClientInfoManager clientInfoManager,
60-
FileUrlProvider fdup,
58+
FileServiceFactory fileServiceFactory,
59+
ILogger<FileController> logger,
60+
ClientInfoManager clientInfoManager,
61+
FileUrlProvider fdup,
6162
CurrentUser currentUser,
62-
DB.FileService fileService,
63+
DB.FileService fileService,
6364
FileContentTypeService fileContentTypeService,
6465
FileImageInfoService fileImageInfoService,
6566
CancellationToken cancellationToken)
@@ -178,4 +179,54 @@ internal ActionResult ServeStaticFile(DB.File file)
178179
return Redirect(downloadUrl.ToString());
179180
}
180181
}
181-
}
182+
183+
[HttpGet("file")]
184+
public async Task<ActionResult<PagedResult<FileDto>>> QueryFiles(PagingRequest query,
185+
[FromServices] CurrentUser currentUser,
186+
[FromServices] FileUrlProvider fdup,
187+
CancellationToken cancellationToken)
188+
{
189+
IQueryable<DB.File> queryable = db.Files
190+
.Where(x => x.CreateUserId == currentUser.Id)
191+
.OrderByDescending(x => x.Id);
192+
PagedResult<FileDto> pagedResult = await PagedResult.FromTempQuery(queryable, query, fdup.CreateFileDto, cancellationToken);
193+
return Ok(pagedResult);
194+
}
195+
196+
[HttpDelete("file/{encryptedFileId}")]
197+
public async Task<ActionResult> DeleteFile(string encryptedFileId,
198+
[FromServices] CurrentUser currentUser,
199+
CancellationToken cancellationToken)
200+
{
201+
int fileId = urlEncryption.DecryptFileId(encryptedFileId);
202+
DB.File? file = await db.Files
203+
.Include(x => x.FileService)
204+
.Include(x => x.MessageContentFiles)
205+
.FirstOrDefaultAsync(x => x.Id == fileId, cancellationToken);
206+
if (file == null)
207+
{
208+
return NotFound("File not found.");
209+
}
210+
if (file.CreateUserId != currentUser.Id && !currentUser.IsAdmin)
211+
{
212+
// only the creator or admin can delete the file
213+
return NotFound("File not found.");
214+
}
215+
216+
if (file.MessageContentFiles.Count != 0)
217+
{
218+
return BadRequest("File is used in messages, cannot delete.");
219+
}
220+
221+
IFileService fs = fileServiceFactory.Create(file.FileService);
222+
bool deleted = await fs.Delete(file.StorageKey, cancellationToken);
223+
if (!deleted)
224+
{
225+
logger.LogWarning("Failed to delete file {FileId} from file service {FileServiceId}", file.Id, file.FileServiceId);
226+
}
227+
db.Files.Remove(file);
228+
await db.SaveChangesAsync(cancellationToken);
229+
230+
return NoContent();
231+
}
232+
}

src/BE/DB/Extensions/ModelReference.cs

Lines changed: 1 addition & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -17,13 +17,6 @@ public partial class ModelReference
1717
_ => false
1818
};
1919

20-
public static bool SupportsResponseAPI(string modelReferenceName) => modelReferenceName switch
21-
{
22-
"o3" => true,
23-
"o4-mini" => true,
24-
_ => false
25-
};
26-
2720
public static bool SupportReasoningEffort(string modelReferenceName)
2821
{
2922
return modelReferenceName switch
@@ -36,6 +29,7 @@ public static bool SupportReasoningEffort(string modelReferenceName)
3629
"o4-mini" => true,
3730
"gemini-2.5-pro-exp-03-25" => true,
3831
"gemini-2.5-flash-preview-04-17" => true,
32+
"gpt-image-1" => true,
3933
_ => false
4034
};
4135
}

src/BE/DB/Init/BasicData.cs

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -147,7 +147,7 @@ private static void InsertModelProviders(ChatsDB db)
147147

148148
private static void InsertModelReferences(ChatsDB db)
149149
{
150-
// Generated from data, hash: 852a05a6c23337fa3522b93eeb09f6d045a9a89d3238047ef8cd288ac69b427e
150+
// Generated from data, hash: 340bce5e9638ecbc2678c6f6ddc577d28e55875ee79e8cc0effa69b8818c2f07
151151
db.ModelReferences.AddRange(
152152
[
153153
new(){ Id=0, ProviderId=0, Name="Test", DisplayName=null, PublishDate=null, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=2048, MaxResponseTokens=2048, TokenizerId=1, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", },
@@ -177,6 +177,7 @@ private static void InsertModelReferences(ChatsDB db)
177177
new(){ Id=123, ProviderId=1, Name="gpt-4.1-nano", DisplayName="gpt-4.1", PublishDate=new DateOnly(2025, 4, 14), MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=1047576, MaxResponseTokens=32768, TokenizerId=2, InputTokenPrice1M=0.10000M, OutputTokenPrice1M=0.40000M, CurrencyCode="USD", },
178178
new(){ Id=124, ProviderId=1, Name="o3", DisplayName=null, PublishDate=new DateOnly(2025, 4, 16), MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=200000, MaxResponseTokens=100000, TokenizerId=2, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=40.00000M, CurrencyCode="USD", },
179179
new(){ Id=125, ProviderId=1, Name="o4-mini", DisplayName=null, PublishDate=new DateOnly(2025, 4, 16), MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=200000, MaxResponseTokens=100000, TokenizerId=2, InputTokenPrice1M=1.10000M, OutputTokenPrice1M=4.40000M, CurrencyCode="USD", },
180+
new(){ Id=126, ProviderId=1, Name="gpt-image-1", DisplayName=null, PublishDate=new DateOnly(2025, 4, 16), MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=false, AllowStreaming=false, ReasoningResponseKindId=0, ContextWindow=65536, MaxResponseTokens=10, TokenizerId=2, InputTokenPrice1M=5.00000M, OutputTokenPrice1M=40.00000M, CurrencyCode="USD", },
180181
new(){ Id=200, ProviderId=2, Name="hunyuan-turbo", DisplayName=null, PublishDate=null, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=32768, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=50.00000M, CurrencyCode="RMB", },
181182
new(){ Id=201, ProviderId=2, Name="hunyuan-pro", DisplayName=null, PublishDate=null, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=32768, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=30.00000M, OutputTokenPrice1M=100.00000M, CurrencyCode="RMB", },
182183
new(){ Id=202, ProviderId=2, Name="hunyuan-standard-256K", DisplayName=null, PublishDate=null, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=262144, MaxResponseTokens=6144, TokenizerId=null, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="RMB", },
@@ -226,6 +227,7 @@ private static void InsertModelReferences(ChatsDB db)
226227
new(){ Id=523, ProviderId=5, Name="gpt-4.1-nano", DisplayName="gpt-4.1", PublishDate=new DateOnly(2025, 4, 14), MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=1047576, MaxResponseTokens=32768, TokenizerId=2, InputTokenPrice1M=0.10000M, OutputTokenPrice1M=0.40000M, CurrencyCode="USD", },
227228
new(){ Id=524, ProviderId=5, Name="o3", DisplayName=null, PublishDate=new DateOnly(2025, 4, 16), MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=200000, MaxResponseTokens=100000, TokenizerId=2, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=40.00000M, CurrencyCode="USD", },
228229
new(){ Id=525, ProviderId=5, Name="o4-mini", DisplayName=null, PublishDate=new DateOnly(2025, 4, 16), MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=200000, MaxResponseTokens=100000, TokenizerId=2, InputTokenPrice1M=1.10000M, OutputTokenPrice1M=4.40000M, CurrencyCode="USD", },
230+
new(){ Id=526, ProviderId=5, Name="gpt-image-1", DisplayName=null, PublishDate=new DateOnly(2025, 4, 16), MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=false, AllowStreaming=false, ReasoningResponseKindId=0, ContextWindow=65536, MaxResponseTokens=10, TokenizerId=2, InputTokenPrice1M=5.00000M, OutputTokenPrice1M=40.00000M, CurrencyCode="USD", },
229231
new(){ Id=600, ProviderId=6, Name="ernie-4.0-turbo-128k", DisplayName="ERNIE-4.0-Turbo", PublishDate=null, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=20.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="RMB", },
230232
new(){ Id=601, ProviderId=6, Name="ernie-4.0-8k", DisplayName="ERNIE-4.0", PublishDate=null, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=30.00000M, OutputTokenPrice1M=90.00000M, CurrencyCode="RMB", },
231233
new(){ Id=602, ProviderId=6, Name="ernie-3.5-8k", DisplayName="ERNIE-3.5", PublishDate=null, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=0.80000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", },
@@ -244,6 +246,10 @@ private static void InsertModelReferences(ChatsDB db)
244246
new(){ Id=619, ProviderId=6, Name="deepseek-r1-distill-qwen-7b", DisplayName="DeepSeek-R1", PublishDate=null, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=1, ContextWindow=64000, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", },
245247
new(){ Id=620, ProviderId=6, Name="deepseek-r1-distill-llama-70b", DisplayName="DeepSeek-R1", PublishDate=null, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=1, ContextWindow=64000, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=2.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", },
246248
new(){ Id=621, ProviderId=6, Name="ernie-4.5-8k-preview", DisplayName="ERNIE-4.5", PublishDate=null, MinTemperature=0.01M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=16.00000M, CurrencyCode="RMB", },
249+
new(){ Id=622, ProviderId=6, Name="ernie-x1-turbo-32k", DisplayName="ERNIE-X1", PublishDate=new DateOnly(2025, 4, 27), MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=1, ContextWindow=32768, MaxResponseTokens=12288, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=4.00000M, CurrencyCode="RMB", },
250+
new(){ Id=623, ProviderId=6, Name="ernie-4.5-turbo-vl-32k", DisplayName="ERNIE-4.5", PublishDate=new DateOnly(2025, 4, 27), MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=32768, MaxResponseTokens=12288, TokenizerId=null, InputTokenPrice1M=3.00000M, OutputTokenPrice1M=9.00000M, CurrencyCode="RMB", },
251+
new(){ Id=624, ProviderId=6, Name="ernie-4.5-turbo-128k", DisplayName="ERNIE-4.5", PublishDate=new DateOnly(2025, 4, 27), MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=131072, MaxResponseTokens=12288, TokenizerId=null, InputTokenPrice1M=0.80000M, OutputTokenPrice1M=3.20000M, CurrencyCode="RMB", },
252+
new(){ Id=625, ProviderId=6, Name="ernie-4.5-turbo-32k", DisplayName="ERNIE-4.5", PublishDate=new DateOnly(2025, 4, 27), MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=32768, MaxResponseTokens=12288, TokenizerId=null, InputTokenPrice1M=0.80000M, OutputTokenPrice1M=3.20000M, CurrencyCode="RMB", },
247253
new(){ Id=700, ProviderId=7, Name="qwen-max", DisplayName=null, PublishDate=null, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=32768, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=20.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="RMB", },
248254
new(){ Id=701, ProviderId=7, Name="qwen-plus", DisplayName=null, PublishDate=null, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=0.80000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", },
249255
new(){ Id=702, ProviderId=7, Name="qwen-turbo", DisplayName=null, PublishDate=null, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ReasoningResponseKindId=0, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=0.30000M, OutputTokenPrice1M=0.60000M, CurrencyCode="RMB", },

src/BE/Services/Models/ChatServices/ChatFactory.cs

Lines changed: 17 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
using Chats.BE.Services.Models.ChatServices.GoogleAI;
44
using Chats.BE.Services.Models.ChatServices.OpenAI;
55
using Chats.BE.Services.Models.ChatServices.OpenAI.QianFan;
6+
using Chats.BE.Services.Models.ChatServices.OpenAI.Special;
67
using Chats.BE.Services.Models.ChatServices.Test;
78
using Chats.BE.Services.Models.ModelLoaders;
89
using OpenAI.Chat;
@@ -17,11 +18,16 @@ public ChatService CreateChatService(Model model)
1718
ChatService cs = modelProvider switch
1819
{
1920
DBModelProvider.Test => new TestChatService(model),
20-
DBModelProvider.OpenAI => new OpenAIChatService(model),
21-
DBModelProvider.AzureOpenAI => ModelReference.SupportsResponseAPI(model.ModelReference.Name) switch
21+
DBModelProvider.OpenAI => model.ModelReference.Name switch
2222
{
23-
true => new AzureResponseApiService(model),
24-
false => new AzureChatService(model)
23+
"gpt-image-1" => new ImageGenerationChatService(model),
24+
_ => new AzureChatService(model),
25+
},
26+
DBModelProvider.AzureOpenAI => model.ModelReference.Name switch
27+
{
28+
"o3" or "o4-mini" => new AzureResponseApiService(model),
29+
"gpt-image-1" => new AzureImageGenerationChatService(model),
30+
_ => new AzureChatService(model),
2531
},
2632
DBModelProvider.WenXinQianFan => new QianFanChatService(model),
2733
DBModelProvider.AliyunDashscope => new QwenChatService(model),
@@ -82,7 +88,13 @@ public async Task<ModelValidateResult> ValidateModel(ModelKey modelKey, ModelRef
8288
});
8389
try
8490
{
85-
await foreach (var seg in cs.ChatStreamedFEProcessed([new UserChatMessage("1+1=?")], new ChatCompletionOptions(), ChatExtraDetails.Default, cancellationToken))
91+
ChatCompletionOptions cco = new();
92+
if (ModelReference.SupportReasoningEffort(modelReference.Name))
93+
{
94+
cco.ReasoningEffortLevel = ChatReasoningEffortLevel.Low;
95+
}
96+
97+
await foreach (var seg in cs.ChatStreamedFEProcessed([new UserChatMessage("1+1=?")], cco, ChatExtraDetails.Default, cancellationToken))
8698
{
8799
if (seg.IsFromUpstream)
88100
{

0 commit comments

Comments
 (0)