
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
+import java.time.Instant;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.function.Consumer;
@@ -39,16 +40,25 @@ public static Builder newBuilder() {

     public static class Builder {

-        private String fieldName = "incrementalWriteHash";
+        private String hashKeyName = "incrementalWriteHash";
+        private String timestampKeyName = "incrementalWriteTimestamp";
         private boolean canonicalizeJson = true;
         private boolean useEvalQuery = false;
         private Consumer<DocumentWriteOperation[]> skippedDocumentsConsumer;

         /**
-         * @param fieldName the name of the MarkLogic field that will hold the hash value; defaults to "incrementalWriteHash".
+         * @param keyName the name of the MarkLogic metadata key that will hold the hash value; defaults to "incrementalWriteHash".
          */
-        public Builder fieldName(String fieldName) {
-            this.fieldName = fieldName;
+        public Builder hashKeyName(String keyName) {
+            this.hashKeyName = keyName;
+            return this;
+        }
+
+        /**
+         * @param keyName the name of the MarkLogic metadata key that will hold the timestamp value; defaults to "incrementalWriteTimestamp".
+         */
+        public Builder timestampKeyName(String keyName) {
+            this.timestampKeyName = keyName;
             return this;
         }

@@ -79,29 +89,32 @@ public Builder onDocumentsSkipped(Consumer<DocumentWriteOperation[]> skippedDocu

         public IncrementalWriteFilter build() {
             if (useEvalQuery) {
-                return new IncrementalWriteEvalFilter(fieldName, canonicalizeJson, skippedDocumentsConsumer);
+                return new IncrementalWriteEvalFilter(hashKeyName, timestampKeyName, canonicalizeJson, skippedDocumentsConsumer);
             }
-            return new IncrementalWriteOpticFilter(fieldName, canonicalizeJson, skippedDocumentsConsumer);
+            return new IncrementalWriteOpticFilter(hashKeyName, timestampKeyName, canonicalizeJson, skippedDocumentsConsumer);
         }
     }

-    protected final String fieldName;
+    protected final String hashKeyName;
+    private final String timestampKeyName;
     private final boolean canonicalizeJson;
     private final Consumer<DocumentWriteOperation[]> skippedDocumentsConsumer;

     // Hardcoding this for now, with a good general purpose hashing function.
     // See https://xxhash.com for benchmarks.
     private final LongHashFunction hashFunction = LongHashFunction.xx3();

-    public IncrementalWriteFilter(String fieldName, boolean canonicalizeJson, Consumer<DocumentWriteOperation[]> skippedDocumentsConsumer) {
-        this.fieldName = fieldName;
+    public IncrementalWriteFilter(String hashKeyName, String timestampKeyName, boolean canonicalizeJson, Consumer<DocumentWriteOperation[]> skippedDocumentsConsumer) {
+        this.hashKeyName = hashKeyName;
+        this.timestampKeyName = timestampKeyName;
         this.canonicalizeJson = canonicalizeJson;
         this.skippedDocumentsConsumer = skippedDocumentsConsumer;
     }

     protected final DocumentWriteSet filterDocuments(Context context, Function<String, String> hashRetriever) {
         final DocumentWriteSet newWriteSet = context.getDatabaseClient().newDocumentManager().newWriteSet();
         final List<DocumentWriteOperation> skippedDocuments = new ArrayList<>();
+        final String timestamp = Instant.now().toString();

         for (DocumentWriteOperation doc : context.getDocumentWriteSet()) {
             if (!DocumentWriteOperation.OperationType.DOCUMENT_WRITE.equals(doc.getOperationType())) {
@@ -117,14 +130,14 @@ protected final DocumentWriteSet filterDocuments(Context context, Function<Strin

             if (existingHash != null) {
                 if (!existingHash.equals(contentHash)) {
-                    newWriteSet.add(addHashToMetadata(doc, fieldName, contentHash));
+                    newWriteSet.add(addHashToMetadata(doc, hashKeyName, contentHash, timestampKeyName, timestamp));
                 } else if (skippedDocumentsConsumer != null) {
                     skippedDocuments.add(doc);
                 } else {
                     // No consumer, so skip the document silently.
                 }
             } else {
-                newWriteSet.add(addHashToMetadata(doc, fieldName, contentHash));
+                newWriteSet.add(addHashToMetadata(doc, hashKeyName, contentHash, timestampKeyName, timestamp));
             }
         }

@@ -173,7 +186,8 @@ private String computeHash(String content) {
         return Long.toHexString(hash);
     }

-    protected static DocumentWriteOperation addHashToMetadata(DocumentWriteOperation op, String fieldName, String hash) {
+    protected static DocumentWriteOperation addHashToMetadata(DocumentWriteOperation op, String hashKeyName, String hash,
+                                                              String timestampKeyName, String timestamp) {
         DocumentMetadataHandle newMetadata = new DocumentMetadataHandle();
         if (op.getMetadata() != null) {
             DocumentMetadataHandle originalMetadata = (DocumentMetadataHandle) op.getMetadata();
@@ -183,7 +197,10 @@ protected static DocumentWriteOperation addHashToMetadata(DocumentWriteOperation
             newMetadata.setProperties(originalMetadata.getProperties());
             newMetadata.getMetadataValues().putAll(originalMetadata.getMetadataValues());
         }
-        newMetadata.getMetadataValues().put(fieldName, hash);
+
+        newMetadata.getMetadataValues().put(hashKeyName, hash);
+        newMetadata.getMetadataValues().put(timestampKeyName, timestamp);
+
         return new DocumentWriteOperationImpl(op.getUri(), newMetadata, op.getContent(), op.getTemporalDocumentURI());
     }
 }
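
For reference, here is a minimal usage sketch of the builder API after this change. The newBuilder(), hashKeyName(), timestampKeyName(), onDocumentsSkipped(), and build() methods all appear in the diff above; the consumer body and variable names are illustrative assumptions, not part of the actual change.

    // Sketch: configure an IncrementalWriteFilter with explicit metadata key names.
    // The builder methods come from the diff above; the println consumer is only
    // an illustrative placeholder.
    IncrementalWriteFilter filter = IncrementalWriteFilter.newBuilder()
        .hashKeyName("incrementalWriteHash")           // metadata key storing the xx3 content hash
        .timestampKeyName("incrementalWriteTimestamp") // metadata key storing the write timestamp
        .onDocumentsSkipped(skipped ->
            System.out.println("Skipped " + skipped.length + " unchanged documents"))
        .build();

Both keys end up in each written document's metadata values: the stored hash lets a later write of identical content be detected and skipped, while the timestamp records when the filter last actually wrote the document.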