@@ -45,6 +45,29 @@ trait GroupExecution {
   val effectiveThreadCount: Int =
     ec.map(_.getMaximumPoolSize).getOrElse(1)
 
+  private val envVarsForInterpolation = Seq(
+    "PWD" -> workspace.toString,
+    "PWD_URI" -> workspace.toURI.toString,
+    "MILL_VERSION" -> mill.constants.BuildInfo.millVersion,
+    "MILL_BIN_PLATFORM" -> mill.constants.BuildInfo.millBinPlatform
+  )
+
+  /** Recursively examine all `ujson.Str` values and replace '${VAR}' patterns. */
+  private def interpolateEnvVarsInJson(json: ujson.Value): ujson.Value = {
+    import scala.jdk.CollectionConverters.*
+    val envWithPwd = (env ++ envVarsForInterpolation).asJava
+
+    // recursively walk the ujson.Value, interpolating env vars in every string node
+    def rec(json: ujson.Value): ujson.Value = json match {
+      case ujson.Str(s) => mill.constants.Util.interpolateEnvVars(s, envWithPwd)
+      case ujson.Arr(xs) => ujson.Arr(xs.map(rec))
+      case ujson.Obj(kvs) => ujson.Obj.from(kvs.map((k, v) => (k, rec(v))))
+      case v => v
+    }
+
+    rec(json)
+  }
+
   // those result which are inputs but not contained in this terminal group
   def executeGroupCached(
       terminal: Task[?],
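
To see the new helper in isolation, here is a rough, self-contained sketch of the same recursive walk. The `interpolate` function below is a simplified stand-in for `mill.constants.Util.interpolateEnvVars` (whose exact lookup rules are not part of this diff), and the env values are made up:

    import scala.util.matching.Regex

    // Simplified stand-in: replace ${VAR} with the value from `env`, leaving unknown vars untouched.
    def interpolate(s: String, env: Map[String, String]): String =
      """\$\{([A-Za-z_]+)\}""".r.replaceAllIn(
        s,
        m => Regex.quoteReplacement(env.getOrElse(m.group(1), m.matched))
      )

    // Same shape as interpolateEnvVarsInJson above, but with the env passed explicitly.
    def rec(json: ujson.Value, env: Map[String, String]): ujson.Value = json match {
      case ujson.Str(s)   => ujson.Str(interpolate(s, env))
      case ujson.Arr(xs)  => ujson.Arr(xs.map(rec(_, env)))
      case ujson.Obj(kvs) => ujson.Obj.from(kvs.map { case (k, v) => (k, rec(v, env)) })
      case v              => v
    }

    val env = Map("PWD" -> "/home/user/project", "MILL_VERSION" -> "1.0.0") // sample values
    val overrideJson = ujson.read("""{"sources": ["${PWD}/src"], "millVersion": "${MILL_VERSION}"}""")
    rec(overrideJson, env)
    // -> {"sources": ["/home/user/project/src"], "millVersion": "1.0.0"}
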
@@ -62,61 +85,46 @@ trait GroupExecution {
       upstreamPathRefs: Seq[PathRef]
   ): GroupExecution.Results = MappedRoots.withMillDefaults(outPath = outPath) {
 
-    val externalInputsHash = MurmurHash3.orderedHash(
-      group.flatMap(_.inputs).filter(!group.contains(_))
-        .flatMap(results(_).asSuccess.map(_.value._2))
-    )
-
-    val sideHashes = MurmurHash3.orderedHash(group.iterator.map(_.sideHash))
-
-    val scriptsHash = MurmurHash3.orderedHash(
-      group
-        .iterator
-        .collect { case namedTask: Task.Named[_] =>
-          CodeSigUtils.codeSigForTask(
-            namedTask,
-            classToTransitiveClasses,
-            allTransitiveClassMethods,
-            codeSignatures,
-            constructorHashSignatures
-          )
-        }
-        .flatten
-    )
+    val inputsHash = {
+      val externalInputsHash = MurmurHash3.orderedHash(
+        group.flatMap(_.inputs).filter(!group.contains(_))
+          .flatMap(results(_).asSuccess.map(_.value._2))
+      )
+      val sideHashes = MurmurHash3.orderedHash(group.iterator.map(_.sideHash))
+      val scriptsHash = MurmurHash3.orderedHash(
+        group
+          .iterator
+          .collect { case namedTask: Task.Named[_] =>
+            CodeSigUtils.codeSigForTask(
+              namedTask,
+              classToTransitiveClasses,
+              allTransitiveClassMethods,
+              codeSignatures,
+              constructorHashSignatures
+            )
+          }
+          .flatten
+      )
+      // the JVM running this code currently
+      val javaHomeHash = sys.props("java.home").hashCode
 
-    val javaHomeHash = sys.props("java.home").hashCode
-    val inputsHash =
       externalInputsHash + sideHashes + classLoaderSigHash + scriptsHash + javaHomeHash
+    }
 
     terminal match {
 
       case labelled: Task.Named[_] =>
 
-        // recursively convert java data structure to ujson.Value
-        val envWithPwd = env ++ Seq(
-          "PWD" -> workspace.toString,
-          "PWD_URI" -> workspace.toURI.toString,
-          "MILL_VERSION" -> mill.constants.BuildInfo.millVersion,
-          "MILL_BIN_PLATFORM" -> mill.constants.BuildInfo.millBinPlatform
-        )
-
         labelled.ctx.segments.last.value match {
+          // apply build override
           case single if labelled.ctx.enclosingModule.buildOverrides.contains(single) =>
-            val jsonData = labelled.ctx.enclosingModule.buildOverrides(single)
-
-            import scala.jdk.CollectionConverters.*
-            def rec(x: ujson.Value): ujson.Value = x match {
-              case ujson.Str(s) => mill.constants.Util.interpolateEnvVars(s, envWithPwd.asJava)
-              case ujson.Arr(xs) => ujson.Arr(xs.map(rec))
-              case ujson.Obj(kvs) => ujson.Obj.from(kvs.map((k, v) => (k, rec(v))))
-              case v => v
-            }
 
+            val jsonData = labelled.ctx.enclosingModule.buildOverrides(single)
             val (execRes, serializedPaths) =
               try {
                 val (resultData, serializedPaths) = PathRef.withSerializedPaths {
                   PathRef.currentOverrideModulePath.withValue(labelled.ctx.millSourcePath) {
-                    upickle.read[Any](rec(jsonData))(
+                    upickle.read[Any](interpolateEnvVarsInJson(jsonData))(
                       using labelled.readWriterOpt.get.asInstanceOf[upickle.Reader[Any]]
                     )
                   }
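
A loose, self-contained model of the grouped hash computation above (the component values here are made up; in Mill they come from upstream task results, task code signatures, the classloader signature, and the running JVM). Changing any single component changes `inputsHash` and therefore invalidates the cached result:

    import scala.util.hashing.MurmurHash3

    // Hypothetical stand-in values for the components combined above.
    val externalInputsHash = MurmurHash3.orderedHash(Seq("upstream-result-1", "upstream-result-2"))
    val sideHashes         = MurmurHash3.orderedHash(Seq(0, 0))
    val scriptsHash        = MurmurHash3.orderedHash(Seq(0x1234abc, 0x5678def))
    val classLoaderSigHash = 12345                           // assumed constant for the example
    val javaHomeHash       = sys.props("java.home").hashCode // switching JVMs changes this term

    val inputsHash =
      externalInputsHash + sideHashes + classLoaderSigHash + scriptsHash + javaHomeHash
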
@@ -133,14 +141,16 @@ trait GroupExecution {
               }
 
             GroupExecution.Results(
-              Map(labelled -> execRes),
-              Nil,
+              newResults = Map(labelled -> execRes),
+              newEvaluated = Nil,
               cached = true,
-              inputsHash,
-              -1,
-              false,
-              serializedPaths
+              inputsHash = inputsHash,
+              previousInputsHash = -1,
+              valueHashChanged = false,
+              serializedPaths = serializedPaths
             )
+
+          // no build overrides
           case _ =>
             val out = if (!labelled.ctx.external) outPath else externalOutPath
             val paths = ExecutionPaths.resolve(out, labelled.ctx.segments)
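
For readers skimming the diff, the named arguments now spell out what each positional slot meant. A hypothetical shape for `GroupExecution.Results`, reconstructed only from the call sites in this diff (the field types are guesses, not the actual definition):

    // Hypothetical reconstruction for readability; see GroupExecution.Results in the Mill sources for the real definition.
    case class Results(
        newResults: Map[Task[?], Any],     // assumed value type
        newEvaluated: Seq[Task[?]],
        cached: java.lang.Boolean,         // three-valued in the call sites: true / false / null (for Task.Input)
        inputsHash: Int,
        previousInputsHash: Int,
        valueHashChanged: Boolean,
        serializedPaths: Seq[PathRef]
    )
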
@@ -149,18 +159,18 @@ trait GroupExecution {
             // `cached.isEmpty` means worker metadata file removed by user so recompute the worker
             val (multiLogger, fileLoggerOpt) = resolveLogger(Some(paths).map(_.log), logger)
             val upToDateWorker = loadUpToDateWorker(
-              logger,
-              inputsHash,
-              labelled,
-              cached.isEmpty,
-              deps,
-              Some(paths),
-              upstreamPathRefs,
-              exclusive,
-              multiLogger,
-              countMsg,
-              new GroupExecution.DestCreator(Some(paths)),
-              terminal
+              logger = logger,
+              inputsHash = inputsHash,
+              labelled = labelled,
+              forceDiscard = cached.isEmpty,
+              deps = deps,
+              paths = Some(paths),
+              upstreamPathRefs = upstreamPathRefs,
+              exclusive = exclusive,
+              multiLogger = multiLogger,
+              counterMsg = countMsg,
+              destCreator = new GroupExecution.DestCreator(Some(paths)),
+              terminal = terminal
             )
 
             val cachedValueAndHash =
@@ -176,13 +186,13 @@ trait GroupExecution {
               Map(labelled -> res)
 
             GroupExecution.Results(
-              newResults,
-              Nil,
+              newResults = newResults,
+              newEvaluated = Nil,
               cached = true,
-              inputsHash,
-              -1,
+              inputsHash = inputsHash,
+              previousInputsHash = -1,
               valueHashChanged = false,
-              serializedPaths
+              serializedPaths = serializedPaths
             )
 
           case _ =>
@@ -224,13 +234,13 @@ trait GroupExecution {
             }
 
             GroupExecution.Results(
-              newResults,
-              newEvaluated.toSeq,
+              newResults = newResults,
+              newEvaluated = newEvaluated.toSeq,
               cached = if (labelled.isInstanceOf[Task.Input[?]]) null else false,
-              inputsHash,
-              cached.map(_.inputHash).getOrElse(-1),
-              !cached.map(_.valueHash).contains(valueHash),
-              serializedPaths
+              inputsHash = inputsHash,
+              previousInputsHash = cached.map(_.inputHash).getOrElse(-1),
+              valueHashChanged = !cached.map(_.valueHash).contains(valueHash),
+              serializedPaths = serializedPaths
             )
           }
         }
@@ -252,11 +262,11 @@ trait GroupExecution {
           terminal = terminal
         )
         GroupExecution.Results(
-          newResults,
-          newEvaluated.toSeq,
-          null,
-          inputsHash,
-          -1,
+          newResults = newResults,
+          newEvaluated = newEvaluated.toSeq,
+          cached = null,
+          inputsHash = inputsHash,
+          previousInputsHash = -1,
           valueHashChanged = false,
           serializedPaths = Nil
         )
@@ -312,20 +322,20 @@ trait GroupExecution {
     )
 
     GroupExecution.wrap(
-      workspace,
-      deps,
-      outPath,
-      paths,
-      upstreamPathRefs,
-      exclusive,
-      multiLogger,
-      logger,
-      exclusiveSystemStreams,
-      counterMsg,
-      destCreator,
-      getEvaluator().asInstanceOf[Evaluator],
-      terminal,
-      rootModule.getClass.getClassLoader
+      workspace = workspace,
+      deps = deps,
+      outPath = outPath,
+      paths = paths,
+      upstreamPathRefs = upstreamPathRefs,
+      exclusive = exclusive,
+      multiLogger = multiLogger,
+      logger = logger,
+      exclusiveSystemStreams = exclusiveSystemStreams,
+      counterMsg = counterMsg,
+      destCreator = destCreator,
+      evaluator = getEvaluator().asInstanceOf[Evaluator],
+      terminal = terminal,
+      classLoader = rootModule.getClass.getClassLoader
     ) {
       try {
         task.evaluate(args) match {
@@ -564,7 +574,7 @@ object GroupExecution {
     def onRead(path: os.ReadablePath): Unit = path match {
       case path: os.Path =>
         if (!isCommand && !isInput && mill.api.FilesystemCheckerEnabled.value) {
-          if (path.startsWith(workspace) && !validReadDests.exists(path.startsWith(_))) {
+          if (path.startsWith(workspace) && !validReadDests.exists(path.startsWith)) {
             sys.error(
               s"Reading from ${path.relativeTo(workspace)} not allowed during execution of `$terminal`.\n" +
                 "You can only read files referenced by `Task.Source` or `Task.Sources`, or within a `Task.Input"
@@ -576,7 +586,7 @@ object GroupExecution {
 
     def onWrite(path: os.Path): Unit = {
       if (!isCommand && mill.api.FilesystemCheckerEnabled.value) {
-        if (path.startsWith(workspace) && !validWriteDests.exists(path.startsWith(_))) {
+        if (path.startsWith(workspace) && !validWriteDests.exists(path.startsWith)) {
           sys.error(
             s"Writing to ${path.relativeTo(workspace)} not allowed during execution of `$terminal`.\n" +
               "Normal `Task`s can only write to files within their `Task.dest` folder, only `Task.Command`s can write to other arbitrary files."