Commit 5567d28

Merge pull request #345 from modelix/fix/bulk-sync-large-modules
MODELIX-640 Handle large modules in bulk sync
2 parents d029ea9 + 75a5afb commit 5567d28

56 files changed, +834 -1547 lines (only a subset of the changed files is shown below)


bulk-model-sync-gradle/src/main/kotlin/org/modelix/model/sync/bulk/gradle/tasks/ImportIntoModelServer.kt

Lines changed: 18 additions & 6 deletions
@@ -30,9 +30,12 @@ import org.gradle.api.tasks.TaskAction
 import org.modelix.model.ModelFacade
 import org.modelix.model.api.ILanguage
 import org.modelix.model.api.ILanguageRepository
-import org.modelix.model.client2.ModelClientV2PlatformSpecificBuilder
+import org.modelix.model.api.INode
+import org.modelix.model.api.PNodeAdapter
+import org.modelix.model.client2.ModelClientV2
 import org.modelix.model.client2.runWrite
 import org.modelix.model.lazy.RepositoryId
+import org.modelix.model.operations.OTBranch
 import org.modelix.model.sync.bulk.ModelImporter
 import org.modelix.model.sync.bulk.importFilesAsRootChildren
 import org.modelix.model.sync.bulk.isModuleIncluded
@@ -75,7 +78,7 @@ abstract class ImportIntoModelServer @Inject constructor(of: ObjectFactory) : De
         val repoId = RepositoryId(repositoryId.get())

         val branchRef = ModelFacade.createBranchReference(repoId, branchName.get())
-        val client = ModelClientV2PlatformSpecificBuilder().url(url.get()).build()
+        val client = ModelClientV2.builder().url(url.get()).build()
         val files = inputDir.listFiles()?.filter {
             it.extension == "json" && isModuleIncluded(it.nameWithoutExtension, includedModules.get(), includedModulePrefixes.get())
         }
@@ -84,11 +87,20 @@ abstract class ImportIntoModelServer @Inject constructor(of: ObjectFactory) : De
         runBlocking {
             client.init()
             client.runWrite(branchRef) { rootNode ->
-                logger.info("Got root node: {}", rootNode)
-                logger.info("Importing...")
-                ModelImporter(rootNode, continueOnError.get()).importFilesAsRootChildren(files)
-                logger.info("Import finished")
+                rootNode.runBulkUpdate {
+                    logger.info("Got root node: {}", rootNode)
+                    logger.info("Importing...")
+                    ModelImporter(rootNode, continueOnError.get()).importFilesAsRootChildren(files)
+                    logger.info("Import finished")
+                }
             }
         }
     }
 }
+
+/**
+ * Memory optimization that doesn't record individual change operations, but only the result.
+ */
+private fun INode.runBulkUpdate(body: () -> Unit) {
+    ((this as PNodeAdapter).branch as OTBranch).runBulkUpdate(body = body)
+}
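
For orientation, here is a rough, self-contained sketch of the pattern the task now follows. It is not code from this commit: the repository name, branch name, server URL, and export directory are placeholder assumptions, and the bulkUpdate helper simply restates the private runBulkUpdate extension added above.

import kotlinx.coroutines.runBlocking
import org.modelix.model.ModelFacade
import org.modelix.model.api.INode
import org.modelix.model.api.PNodeAdapter
import org.modelix.model.client2.ModelClientV2
import org.modelix.model.client2.runWrite
import org.modelix.model.lazy.RepositoryId
import org.modelix.model.operations.OTBranch
import org.modelix.model.sync.bulk.ModelImporter
import org.modelix.model.sync.bulk.importFilesAsRootChildren
import java.io.File

// Same idea as the private extension above: run the whole import as one bulk
// update so the OTBranch records only the final result, not every single operation.
fun INode.bulkUpdate(body: () -> Unit) {
    ((this as PNodeAdapter).branch as OTBranch).runBulkUpdate(body = body)
}

fun main() {
    // Placeholder configuration; the Gradle task reads these values from its inputs.
    val branchRef = ModelFacade.createBranchReference(RepositoryId("example-repo"), "master")
    val files = File("build/model-sync").listFiles()?.filter { it.extension == "json" }.orEmpty()

    val client = ModelClientV2.builder().url("http://localhost:28101/v2").build()
    runBlocking {
        client.init()
        client.runWrite(branchRef) { rootNode ->
            // Importing a large module inside one bulk update keeps memory bounded,
            // because individual change operations are no longer accumulated.
            rootNode.bulkUpdate {
                ModelImporter(rootNode, false).importFilesAsRootChildren(files)
            }
        }
    }
}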

bulk-model-sync-lib/src/commonMain/kotlin/org/modelix/model/sync/bulk/ModelImporter.kt

Lines changed: 41 additions & 28 deletions
@@ -51,7 +51,7 @@ class ModelImporter(private val root: INode, private val continueOnError: Boolea
     // updated to the constructor with two arguments.
     constructor(root: INode) : this(root, false)

-    private fun doAndPotentiallyContinueOnErrors(block: () -> Unit) {
+    private inline fun doAndPotentiallyContinueOnErrors(block: () -> Unit) {
         try {
             block()
         } catch (e: Exception) {
@@ -70,34 +70,38 @@ class ModelImporter(private val root: INode, private val continueOnError: Boolea
      */
     @JvmName("importData")
     fun import(data: ModelData) {
-        logImportSize(data.root, logger)
-        logger.info { "Building indices for import..." }
-        originalIdToExisting.clear()
-        postponedReferences.clear()
-        nodesToRemove.clear()
-        numExpectedNodes = countExpectedNodes(data.root)
-        currentNodeProgress = 0
-        buildExistingIndex(root)
-
-        logger.info { "Importing nodes..." }
-        data.root.originalId()?.let { originalIdToExisting[it] = root }
-        syncNode(root, data.root)
-
-        logger.info { "Synchronizing references..." }
-        postponedReferences.forEach {
-            doAndPotentiallyContinueOnErrors {
-                it.invoke()
+        INodeResolutionScope.runWithAdditionalScope(root.getArea()) {
+            logImportSize(data.root, logger)
+            logger.info { "Building indices for import..." }
+            originalIdToExisting.clear()
+            postponedReferences.clear()
+            nodesToRemove.clear()
+            numExpectedNodes = countExpectedNodes(data.root)
+            currentNodeProgress = 0
+            buildExistingIndex(root)
+
+            logger.info { "Importing nodes..." }
+            data.root.originalId()?.let { originalIdToExisting[it] = root }
+            syncNode(root, data.root)
+
+            logger.info { "Synchronizing references..." }
+            postponedReferences.forEach {
+                doAndPotentiallyContinueOnErrors {
+                    it.invoke()
+                }
             }
-        }

-        logger.info { "Removing extra nodes..." }
-        nodesToRemove.forEach {
-            doAndPotentiallyContinueOnErrors {
-                it.remove()
+            logger.info { "Removing extra nodes..." }
+            nodesToRemove.forEach {
+                doAndPotentiallyContinueOnErrors {
+                    if (it.isValid) { // if it's invalid then it's already removed
+                        it.remove()
+                    }
+                }
             }
-        }

-        logger.info { "Synchronization finished." }
+            logger.info { "Synchronization finished." }
+        }
     }

     private fun countExpectedNodes(data: NodeData): Int =
@@ -110,9 +114,7 @@ class ModelImporter(private val root: INode, private val continueOnError: Boolea
         doAndPotentiallyContinueOnErrors {
             syncProperties(node, data)
             syncChildren(node, data)
-            INodeResolutionScope.runWithAdditionalScope(node.getArea()) {
-                syncReferences(node, data)
-            }
+            syncReferences(node, data)
         }
     }

@@ -122,6 +124,17 @@ class ModelImporter(private val root: INode, private val continueOnError: Boolea
         val expectedNodes = data.children.filter { it.role == role }
         val existingNodes = node.getChildren(role).toList()

+        // optimization that uses the bulk operation .addNewChildren
+        if (existingNodes.isEmpty() && expectedNodes.all { originalIdToExisting[it.originalId()] == null }) {
+            node.addNewChildren(role, -1, expectedNodes.map { it.concept?.let { ConceptReference(it) } }).zip(expectedNodes).forEach { (newChild, expected) ->
+                val expectedId = checkNotNull(expected.originalId()) { "Specified node '$expected' has no id" }
+                newChild.setPropertyValue(NodeData.idPropertyKey, expectedId)
+                originalIdToExisting[expectedId] = newChild
+                syncNode(newChild, expected)
+            }
+            continue
+        }
+
        // optimization for when there is no change in the child list
        // size check first to avoid querying the original ID
        if (expectedNodes.size == existingNodes.size && expectedNodes.map { it.originalId() } == existingNodes.map { it.originalId() }) {
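
To make the effect of the new branch in syncChildren more concrete, a simplified sketch follows. It is not taken from the commit: ExpectedChild, addOneByOne, addInBulk, and the idProperty parameter are illustrative stand-ins for NodeData, the old per-child path, and NodeData.idPropertyKey; the INode calls are used the same way the diff above uses them.

import org.modelix.model.api.ConceptReference
import org.modelix.model.api.INode

// Simplified stand-in for the data the importer reads from NodeData;
// the real code uses NodeData.concept and NodeData.originalId().
data class ExpectedChild(val conceptUid: String?, val originalId: String)

// Per-child creation: one addNewChild call (and one recorded change) per node.
fun addOneByOne(parent: INode, role: String, expected: List<ExpectedChild>, idProperty: String) {
    for (child in expected) {
        val newChild = parent.addNewChild(role, -1, child.conceptUid?.let { ConceptReference(it) })
        newChild.setPropertyValue(idProperty, child.originalId)
    }
}

// Bulk creation as in the new syncChildren branch: all children of the role are
// created with a single addNewChildren call and filled in afterwards.
fun addInBulk(parent: INode, role: String, expected: List<ExpectedChild>, idProperty: String) {
    parent.addNewChildren(role, -1, expected.map { it.conceptUid?.let { c -> ConceptReference(c) } })
        .zip(expected)
        .forEach { (newChild, child) -> newChild.setPropertyValue(idProperty, child.originalId) }
}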

bulk-model-sync-lib/src/jvmMain/kotlin/org/modelix/model/sync/bulk/ModelExporter.kt

Lines changed: 7 additions & 2 deletions
@@ -16,6 +16,9 @@
 
 package org.modelix.model.sync.bulk
 
+import kotlinx.serialization.ExperimentalSerializationApi
+import kotlinx.serialization.json.Json
+import kotlinx.serialization.json.encodeToStream
 import org.modelix.model.api.INode
 import org.modelix.model.data.ModelData
 import java.io.File
@@ -27,9 +30,11 @@ actual class ModelExporter actual constructor(private val root: INode) {
      *
      * @param outputFile target file of the export
      */
+    @OptIn(ExperimentalSerializationApi::class)
     fun export(outputFile: File) {
-        val modelData = ModelData(root = root.asExported())
         outputFile.parentFile.mkdirs()
-        outputFile.writeText(modelData.toJson())
+
+        val modelData = ModelData(root = root.asExported())
+        Json.encodeToStream(modelData, outputFile.outputStream())
     }
 }
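
The exporter now streams the JSON instead of building it as one string. A minimal sketch of the difference follows; writeModelStreaming is an illustrative name, and closing the stream with a use block is an assumption here, not something the diff shows.

import kotlinx.serialization.ExperimentalSerializationApi
import kotlinx.serialization.json.Json
import kotlinx.serialization.json.encodeToStream
import org.modelix.model.data.ModelData
import java.io.File

@OptIn(ExperimentalSerializationApi::class)
fun writeModelStreaming(modelData: ModelData, outputFile: File) {
    outputFile.parentFile.mkdirs()
    // Old approach: modelData.toJson() materializes the complete JSON document
    // as a single String before anything is written to disk.
    // Streaming approach: the serializer writes tokens directly to the file,
    // so the full JSON text is never held in memory on top of the ModelData object.
    outputFile.outputStream().use { Json.encodeToStream(modelData, it) }
}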

bulk-model-sync-lib/src/jvmMain/kotlin/org/modelix/model/sync/bulk/PlatformSpecific.kt

Lines changed: 7 additions & 1 deletion
@@ -16,6 +16,9 @@
 
 package org.modelix.model.sync.bulk
 
+import kotlinx.serialization.ExperimentalSerializationApi
+import kotlinx.serialization.json.Json
+import kotlinx.serialization.json.decodeFromStream
 import org.modelix.model.data.ModelData
 import java.io.File
 
@@ -35,8 +38,11 @@ fun ModelImporter.import(jsonFile: File) {
     import(data)
 }
 
+@OptIn(ExperimentalSerializationApi::class)
 fun ModelImporter.importFilesAsRootChildren(files: Collection<File>) {
-    val models = files.map { ModelData.fromJson(it.readText()) }
+    val models: List<ModelData> = files.map {
+        Json.decodeFromStream(it.inputStream())
+    }
     import(mergeModelData(models))
 }
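
The import side mirrors this with a streaming parse. A small sketch under the same caveats: readModels is an illustrative helper, and closing the stream with use is an assumption rather than something the diff shows.

import kotlinx.serialization.ExperimentalSerializationApi
import kotlinx.serialization.json.Json
import kotlinx.serialization.json.decodeFromStream
import org.modelix.model.data.ModelData
import java.io.File

@OptIn(ExperimentalSerializationApi::class)
fun readModels(files: Collection<File>): List<ModelData> =
    files.map { file ->
        // Old approach: ModelData.fromJson(file.readText()) loads the whole file
        // into a String before parsing.
        // New approach: the parser reads straight from the InputStream.
        file.inputStream().use { Json.decodeFromStream<ModelData>(it) }
    }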

bulk-model-sync-mps/src/main/java/org/modelix/mps/model/sync/bulk/MPSBulkSynchronizer.java

Lines changed: 0 additions & 137 deletions
This file was deleted.
