Commit 140f7f2

Merge pull request #6 from dotnetprog/feature/AdjustImportRows

Improvements and unit tests

2 parents 6e08556 + 0ed2f11, commit 140f7f2

File tree

6 files changed: +107 -92 lines changed

src/Dataverse.ConfigurationMigrationTool/Console.Tests/Features/Import/Commands/ImportCommandsTest.cs

Lines changed: 45 additions & 0 deletions

@@ -2,8 +2,10 @@
 using Dataverse.ConfigurationMigrationTool.Console.Features.Import.Commands;
 using Dataverse.ConfigurationMigrationTool.Console.Features.Import.Model;
 using Dataverse.ConfigurationMigrationTool.Console.Features.Shared;
+using Dataverse.ConfigurationMigrationTool.Console.Tests.Extensions;
 using Microsoft.Extensions.Logging;
 using NSubstitute;
+using Shouldly;
 
 namespace Dataverse.ConfigurationMigrationTool.Console.Tests.Features.Import.Commands;
 public class ImportCommandsTest
@@ -69,5 +71,48 @@ public async Task GivenDataToImportWithSchema_WhenTheCommandExecutes_ThenItShoul
         });
 
     }
+    [Fact]
+    public async Task GivenAnInvalidSchema_WhenTheCommandExecutes_ThenItShouldFailAndLogIssues()
+    {
+        //Arrange
+        var importSchema = new ImportSchema
+        {
+            Entity = new()
+            {
+                FakeSchemas.Account,
+                FakeSchemas.Contact,
+                FakeSchemas.Opportunity
 
+            }
+        };
+        var datasets = new Entities
+        {
+            Entity = new()
+            {
+                FakeDatasets.AccountSets,
+                FakeDatasets.ContactSets,
+                FakeDatasets.OpportunitiesSet
+            }
+        };
+        _importDataService.Execute(Arg.Any<ImportDataTask>(), Arg.Any<Entities>())
+            .Returns(TaskResult.Completed);
+        _importDataProvider.ReadFromFile(DataFilePath).Returns(datasets);
+        _importDataProvider.ReadSchemaFromFile(SchemaFilePath).Returns(importSchema);
+        _schemaValidator.Validate(importSchema).Returns(new ValidationResult()
+        {
+            Failures = new List<ValidationFailure>
+            {
+                new ("Entity", "Entity is not valid")
+            }
+        });
+        //Act
+        Func<Task> act = () => _importCommands.Import(SchemaFilePath, DataFilePath);
+
+        //Assert
+        var ex = await act.ShouldThrowAsync<Exception>();
+        ex.Message.ShouldBe("Provided Schema was not valid.");
+        _logger.ShouldHaveLogged(LogLevel.Error, "Schema failed validation process with 1 failure(s).");
+
+
+    }
 }
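
The new failure-path test relies on a ShouldHaveLogged assertion imported from Console.Tests.Extensions, whose implementation is not part of this diff. A minimal sketch of one way to write such a helper over an NSubstitute ILogger substitute (hypothetical code; the real extension may differ):

    using System;
    using System.Linq;
    using Microsoft.Extensions.Logging;
    using NSubstitute;
    using Shouldly;

    public static class LoggerTestExtensions
    {
        // Counts the Log calls recorded by the substitute whose level matches and
        // whose rendered state contains the expected text, then asserts the count.
        public static void ShouldHaveLogged(this ILogger logger, LogLevel level, string message, int count = 1)
        {
            logger.ReceivedCalls()
                .Where(call => call.GetMethodInfo().Name == nameof(ILogger.Log))
                .Select(call => call.GetArguments())
                .Count(args => args[0] is LogLevel l && l == level &&
                               args[2]?.ToString()?.Contains(message) == true)
                .ShouldBe(count);
        }
    }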

src/Dataverse.ConfigurationMigrationTool/Console.Tests/Features/Import/ImportTaskProcessorServiceTests.cs

Lines changed: 13 additions & 19 deletions

@@ -199,22 +199,21 @@ public async Task GivenASelfHiearchyEntityTaskImport_WhenExecuted_ThenItShouldPr
             Arg.Is<StringAttributeMetadata>(md => md.LogicalName == "name"),
             Arg.Is<Field>(f => f.Name == "name")).Returns(x => x.Arg<Field>().Value);
         metadataService.GetEntity(FakeSchemas.SelfHiearchyAccount.Name).Returns(FakeMetadata.Account);
-        bulkOrganizationService.Upsert(Arg.Is<UpsertRequest>(r => r.Target.GetAttributeValue<EntityReference>("parentaccountid") != null))
-            .Returns(x => new(new UpsertResponse() { ["Target"] = x.Arg<UpsertRequest>().Target.ToEntityReference() }));
+
         // Act
         var result = await importService.Execute(task, dataImport);
         // Assert
         Received.InOrder(() =>
         {
-            bulkOrganizationService.UpsertBulk(Arg.Is<IEnumerable<UpsertRequest>>(r => r.Count() == 1));
-            bulkOrganizationService.Upsert(Arg.Is<UpsertRequest>(r => r.Target.Id == FakeDatasets.AccountIds[2]));
-            bulkOrganizationService.Upsert(Arg.Is<UpsertRequest>(r => r.Target.Id == FakeDatasets.AccountIds[1]));
-            bulkOrganizationService.Upsert(Arg.Is<UpsertRequest>(r => r.Target.Id == FakeDatasets.AccountIds[0]));
+            bulkOrganizationService.UpsertBulk(Arg.Is<IEnumerable<UpsertRequest>>(r => r.First().Target.Id == FakeDatasets.AccountIds[3]));
+            bulkOrganizationService.UpsertBulk(Arg.Is<IEnumerable<UpsertRequest>>(r => r.First().Target.Id == FakeDatasets.AccountIds[2]));
+            bulkOrganizationService.UpsertBulk(Arg.Is<IEnumerable<UpsertRequest>>(r => r.First().Target.Id == FakeDatasets.AccountIds[1]));
+            bulkOrganizationService.UpsertBulk(Arg.Is<IEnumerable<UpsertRequest>>(r => r.First().Target.Id == FakeDatasets.AccountIds[0]));
         });
         result.ShouldBe(TaskResult.Completed);
     }
     [Fact]
-    public async Task GivenASelfHiearchyEntityTaskImportWothIssues_WhenExecuted_ThenItShouldProcessInCorrectOrderAndReturnFailed()
+    public async Task GivenASelfHiearchyEntityTaskImportWithIssues_WhenExecuted_ThenItShouldProcessInCorrectOrderAndReturnFailed()
     {
         // Arrange
         var task = new ImportDataTask
@@ -237,8 +236,6 @@ public async Task GivenASelfHiearchyEntityTaskImportWothIssues_WhenExecuted_Then
             Arg.Is<StringAttributeMetadata>(md => md.LogicalName == "name"),
             Arg.Is<Field>(f => f.Name == "name")).Returns(x => x.Arg<Field>().Value);
         metadataService.GetEntity(FakeSchemas.SelfHiearchyAccount.Name).Returns(FakeMetadata.Account);
-        bulkOrganizationService.Upsert(Arg.Is<UpsertRequest>(r => r.Target.GetAttributeValue<EntityReference>("parentaccountid") != null))
-            .Returns(x => new(new OrganizationResponseFaultedResult() { Fault = fault, OriginalRequest = x.Arg<UpsertRequest>() }));
 
         bulkOrganizationService.UpsertBulk(Arg.Is<IEnumerable<UpsertRequest>>(r => r.Count() == 1))
             .Returns(x => [new() { Fault = fault, OriginalRequest = x.Arg<IEnumerable<UpsertRequest>>().First() }]);
@@ -247,10 +244,10 @@ public async Task GivenASelfHiearchyEntityTaskImportWothIssues_WhenExecuted_Then
         // Assert
         Received.InOrder(() =>
         {
-            bulkOrganizationService.UpsertBulk(Arg.Is<IEnumerable<UpsertRequest>>(r => r.Count() == 1));
-            bulkOrganizationService.Upsert(Arg.Is<UpsertRequest>(r => r.Target.Id == FakeDatasets.AccountIds[2]));
-            bulkOrganizationService.Upsert(Arg.Is<UpsertRequest>(r => r.Target.Id == FakeDatasets.AccountIds[1]));
-            bulkOrganizationService.Upsert(Arg.Is<UpsertRequest>(r => r.Target.Id == FakeDatasets.AccountIds[0]));
+            bulkOrganizationService.UpsertBulk(Arg.Is<IEnumerable<UpsertRequest>>(r => r.First().Target.Id == FakeDatasets.AccountIds[3]));
+            bulkOrganizationService.UpsertBulk(Arg.Is<IEnumerable<UpsertRequest>>(r => r.First().Target.Id == FakeDatasets.AccountIds[2]));
+            bulkOrganizationService.UpsertBulk(Arg.Is<IEnumerable<UpsertRequest>>(r => r.First().Target.Id == FakeDatasets.AccountIds[1]));
+            bulkOrganizationService.UpsertBulk(Arg.Is<IEnumerable<UpsertRequest>>(r => r.First().Target.Id == FakeDatasets.AccountIds[0]));
         });
         result.ShouldBe(TaskResult.Failed);
     }
@@ -277,15 +274,12 @@ public async Task GivenACircularSelfHiearchyEntityTaskImport_WhenExecuted_ThenIt
             Arg.Is<StringAttributeMetadata>(md => md.LogicalName == "name"),
             Arg.Is<Field>(f => f.Name == "name")).Returns(x => x.Arg<Field>().Value);
         metadataService.GetEntity(FakeSchemas.SelfHiearchyAccount.Name).Returns(FakeMetadata.Account);
-        bulkOrganizationService.Upsert(Arg.Is<UpsertRequest>(r => r.Target.GetAttributeValue<EntityReference>("parentaccountid") != null))
-            .Returns(x => new(new UpsertResponse() { ["Target"] = x.Arg<UpsertRequest>().Target.ToEntityReference() }));
+
         // Act
         var result = await importService.Execute(task, dataImport);
         // Assert
-        await bulkOrganizationService.Received().UpsertBulk(Arg.Is<IEnumerable<UpsertRequest>>(r => r.Count() == 0));
-        await bulkOrganizationService.DidNotReceive().Upsert(Arg.Any<UpsertRequest>());
-        logger.ShouldHaveLogged(LogLevel.Warning, $"account({FakeDatasets.AccountIds[0]}) was skipped because his parent was not proccessed.", count: 1);
-        logger.ShouldHaveLogged(LogLevel.Warning, $"account({FakeDatasets.AccountIds[1]}) was skipped because his parent was not proccessed.", count: 1);
+        await bulkOrganizationService.DidNotReceive().UpsertBulk(Arg.Any<IEnumerable<UpsertRequest>>());
+        logger.ShouldHaveLogged(LogLevel.Warning, "2 records skipped because of circular dependancies.", count: 1);
         result.ShouldBe(TaskResult.Completed);
     }
 }

src/Dataverse.ConfigurationMigrationTool/Console.Tests/Features/Import/Validators/Rules/EntitySchemas/FieldSchemas/FieldTypeMustMatchWithAttributeValidationRuleTests.cs

Lines changed: 15 additions & 0 deletions

@@ -23,6 +23,21 @@ public async Task GivenAFieldSchemaWithAttributeTypeCode_WhenItIsValidated_ThenI
         result.IsSuccess.ShouldBe(true);
     }
     [Fact]
+    public async Task GivenAFieldSchemaWithAttributeTypeCodeMemo_WhenItIsValidated_ThenItShouldReturnSuccess()
+    {
+        // Arrange
+        var fieldSchema = new FieldSchema
+        {
+            Name = "testField",
+            Type = "string"
+        };
+        var attributeMetadata = CreateAttributeMetadata<MemoAttributeMetadata>();
+        // Act
+        var result = await ExecuteRule(fieldSchema, attributeMetadata);
+        // Assert
+        result.IsSuccess.ShouldBe(true);
+    }
+    [Fact]
     public async Task GivenAnUnresolvedFieldSchemaWithAttributeTypeCode_WhenItIsValidated_ThenItShouldReturnTheProperFailure()
     {
         var fieldSchema = new FieldSchema

src/Dataverse.ConfigurationMigrationTool/Dataverse.ConfigurationMigrationTool.Console/Features/Import/Commands/ImportCommands.cs

Lines changed: 4 additions & 0 deletions

@@ -36,6 +36,10 @@ public async Task Import([Option("schema")] string schemafilepath, [Option("data
         if (schemaValidationResult.IsError)
         {
             _logger.LogError("Schema failed validation process with {count} failure(s).", schemaValidationResult.Failures.Count);
+            foreach (var failure in schemaValidationResult.Failures)
+            {
+                _logger.LogError("schema validation failure: {property} => {failure}", failure.PropertyBound, failure.Message);
+            }
             throw new Exception("Provided Schema was not valid.");
         }
         _logger.LogInformation("Schema validation succeeded.");
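
For context, the ValidationFailure shape implied by the new per-failure logging, and by the test's new ("Entity", "Entity is not valid") constructor call above, would be roughly the following. This is an inferred sketch only; the actual types in Features/Shared may differ:

    using System.Collections.Generic;

    // Hypothetical reconstruction: a positional record carrying the bound
    // property name and a human-readable message.
    public record ValidationFailure(string PropertyBound, string Message);

    public class ValidationResult
    {
        public List<ValidationFailure> Failures { get; init; } = new();
        public bool IsError => Failures.Count > 0;
    }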

src/Dataverse.ConfigurationMigrationTool/Dataverse.ConfigurationMigrationTool.Console/Features/Import/ImportTaskProcessorService.cs

Lines changed: 13 additions & 59 deletions

@@ -96,21 +96,10 @@ private async Task<TaskResult> ImportRelationships(EntityMetadata entity, Import
     }
     private async Task<TaskResult> ImportRecords(EntityMetadata entity, ImportDataTask task, EntityImport entityImport)
     {
-        logger.LogInformation("Importing {entityname} records", entityImport.Name);
-        var recordsWithNoSelfDependancies = entityImport.Records.Record.Where(r =>
-            !r.Field.Any(f => f.Lookupentity == entityImport.Name &&
-                entityImport.Records.Record.Any(r2 => r2.Id != r.Id && r2.Id.ToString() == f.Value))).Select(r => BuildUpsertRequest(entity, entityImport, r)).ToList();
-        var recordsWithSelfDependancies = entityImport.Records.Record.Where(r =>
-            r.Field.Any(f => f.Lookupentity == entityImport.Name &&
-                entityImport.Records.Record.Any(r2 => r2.Id != r.Id && r2.Id.ToString() == f.Value))).ToList();
+        logger.LogInformation("Importing {entityname} records ({count})", entityImport.Name, entityImport.Records.Record.Count);
 
-        logger.LogInformation("records with no self dependancies: {count}", recordsWithNoSelfDependancies.Count);
-        logger.LogInformation("records with self dependancies: {count}", recordsWithSelfDependancies.Count);
-        //See if upsert request keep ids
-
-        //implement parallelism and batching
-        var responses = await bulkOrganizationService.UpsertBulk(recordsWithNoSelfDependancies);
 
+        var responses = await ProcessDependantRecords(entityImport.Records.Record, entity, entityImport);
         foreach (var response in responses)
         {
 
@@ -122,66 +111,31 @@ private async Task<TaskResult> ImportRecords(EntityMetadata entity, ImportDataTa
 
         }
         var resultTask = responses.Any() ? TaskResult.Failed : TaskResult.Completed;
-
-        var singleResponses = await ProcessDependantRecords(recordsWithSelfDependancies, entity, entityImport);
-        foreach (var response in singleResponses)
-        {
-
-            var targetRequest = (response.OriginalRequest as UpsertRequest).Target;
-
-
-            logger.LogError("{logicalname}({id}) upsert failed because: {fault}", targetRequest.LogicalName, targetRequest.Id, response.Fault.Message);
-
-
-        }
-        resultTask = singleResponses.Any() ? TaskResult.Failed : resultTask;
         logger.LogInformation("Import Task of {entityname} records terminated in a {State} state", entityImport.Name, resultTask);
         return resultTask;
 
     }
     private async Task<IEnumerable<OrganizationResponseFaultedResult>> ProcessDependantRecords(IEnumerable<Record> records, EntityMetadata entity, EntityImport entityImport)
     {
 
-        var retries = new Dictionary<Guid, int>();
-        var queue = new Queue<Record>(records);
         var results = new List<OrganizationResponseFaultedResult>();
-        while (queue.Count > 0)
+        var recordsCanBeProcessed = records.Where(r => !r.Field.Any(f => f.Lookupentity == entityImport.Name &&
+            records.Any(r2 => r2.Id != r.Id && r2.Id.ToString() == f.Value))).Select(r => BuildUpsertRequest(entity, entityImport, r)).ToList();
+        logger.LogInformation("Processing {count} records", recordsCanBeProcessed.Count);
+        if (recordsCanBeProcessed.Count == 0)
         {
-            var record = queue.Dequeue();
-
-            if (record.Field.Any(f => f.Lookupentity == entityImport.Name && (queue.Any(r => r.Id.ToString() == f.Value) ||
-                retries.Any(kv => kv.Key.ToString() == f.Value && kv.Value >= MAX_RETRIES))))
+            if (records.Any())
             {
-
-                if (retries.ContainsKey(record.Id) && retries[record.Id] >= MAX_RETRIES)
-                {
-                    logger.LogWarning("{entityType}({id}) was skipped because his parent was not proccessed.", entityImport.Name, record.Id);
-                    continue;
-                }
-
-
-                //Enqueue record again until his parent is processed.
-                queue.Enqueue(record);
-                retries[record.Id] = retries.ContainsKey(record.Id) ? retries[record.Id] + 1 : 1;
-                continue;
+                logger.LogWarning("{count} records skipped because of circular dependancies.", records.Count());
             }
-            var request = BuildUpsertRequest(entity, entityImport, record);
-
-            var result = await bulkOrganizationService.Upsert(request);
-            if (result.IsFailure)
-            {
-                results.Add(result.Failure);
-            }
-
-        }
-        var maxretries = retries.Where(kv => kv.Value >= MAX_RETRIES).Select(kv => kv.Key).ToList();
-        if (maxretries.Any())
-        {
-            logger.LogWarning("The following records ({count}) were not processed due to circular dependencies: {ids}", maxretries.Count, string.Join(", ", maxretries));
+            return results;
         }
+        var responses = await bulkOrganizationService.UpsertBulk(recordsCanBeProcessed);
+        results.AddRange(responses);
 
+        responses = await ProcessDependantRecords(records.Where(r => !recordsCanBeProcessed.Any(r2 => r.Id == r2.Target.Id)), entity, entityImport);
+        results.AddRange(responses);
        return results;
-
     }
     private UpsertRequest BuildUpsertRequest(EntityMetadata entityMD, EntityImport entityImport, Record record)
     {
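
The net effect of this refactor: the retry queue and the per-record Upsert calls are gone, and ProcessDependantRecords now recurses, issuing one UpsertBulk call per "dependency wave" (the records whose intra-entity parent is no longer pending) until either everything is processed or only a cycle remains. A self-contained sketch of the idea, with simplified stand-in types (Row and upsertBulk are illustrative, not the tool's real API):

    using System;
    using System.Collections.Generic;
    using System.Linq;

    // Stand-in for a record that may reference another record of the same entity.
    record Row(Guid Id, Guid? ParentId);

    static class HierarchyImportSketch
    {
        // Each pass upserts every row whose parent is no longer pending, then
        // recurses on the remainder; if no row is ready, the remainder is a cycle.
        public static void ImportInWaves(IReadOnlyCollection<Row> pending, Action<IReadOnlyList<Row>> upsertBulk)
        {
            if (pending.Count == 0) return;

            var ready = pending
                .Where(r => r.ParentId is null || !pending.Any(p => p.Id != r.Id && p.Id == r.ParentId))
                .ToList();

            if (ready.Count == 0)
            {
                Console.WriteLine($"{pending.Count} records skipped because of circular dependencies.");
                return;
            }

            upsertBulk(ready); // one bulk call per level of the hierarchy
            ImportInWaves(pending.Except(ready).ToList(), upsertBulk);
        }
    }

On a parent-to-child chain this yields one single-record bulk call per level, roots first, which is exactly the ordering the reworked Received.InOrder assertions in the tests above verify.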

src/Dataverse.ConfigurationMigrationTool/Dataverse.ConfigurationMigrationTool.Console/Features/Import/Validators/Rules/EntitySchemas/FieldSchemas/FieldTypeMustMatchWithAttributeValidationRule.cs

Lines changed: 17 additions & 14 deletions

@@ -3,25 +3,28 @@
 using Dataverse.ConfigurationMigrationTool.Console.Features.Shared;
 using Microsoft.Xrm.Sdk.Metadata;
 
-namespace Dataverse.ConfigurationMigrationTool.Console.Features.Import.Validators.Rules.EntitySchemas.FieldSchemas
+namespace Dataverse.ConfigurationMigrationTool.Console.Features.Import.Validators.Rules.EntitySchemas.FieldSchemas;
+
+public class FieldTypeMustMatchWithAttributeValidationRule : IFieldSchemaValidationRule
 {
-    public class FieldTypeMustMatchWithAttributeValidationRule : IFieldSchemaValidationRule
+    private static IMapper<FieldSchema, AttributeTypeCode?> AttributeTypeMapper = new FieldSchemaToAttributeTypeMapper();
+    public async Task<RuleResult> Validate(FieldSchema fieldSchema, AttributeMetadata attributeMetadata)
     {
-        private static IMapper<FieldSchema, AttributeTypeCode?> AttributeTypeMapper = new FieldSchemaToAttributeTypeMapper();
-        public async Task<RuleResult> Validate(FieldSchema fieldSchema, AttributeMetadata attributeMetadata)
+        var schemafieldtype = AttributeTypeMapper.Map(fieldSchema);
+        if (schemafieldtype == null)
         {
-            var schemafieldtype = AttributeTypeMapper.Map(fieldSchema);
-            if (schemafieldtype == null)
-            {
-                return RuleResult.Failure($"Schema Field type {fieldSchema.Type} is not currently supported.");
-
-            }
+            return RuleResult.Failure($"Schema Field type {fieldSchema.Type} is not currently supported.");
 
-            if (schemafieldtype.Value != attributeMetadata.AttributeType)
-            {
-                return RuleResult.Failure($"Attribute {fieldSchema.Name} is type of {schemafieldtype} but it's expected to be {attributeMetadata.AttributeType}");
-            }
+        }
+        if (schemafieldtype == AttributeTypeCode.String && attributeMetadata.AttributeType == AttributeTypeCode.Memo)
+        {
+            // Special case for Memo, which is a long text field in Dataverse
             return RuleResult.Success();
         }
+        if (schemafieldtype.Value != attributeMetadata.AttributeType)
+        {
+            return RuleResult.Failure($"Attribute {fieldSchema.Name} is type of {schemafieldtype} but it's expected to be {attributeMetadata.AttributeType}");
+        }
+        return RuleResult.Success();
     }
 }