Skip to content

Commit d8e490a

Browse files
author
Jaden Peterson
committed
Don't write to the temporary directory in ZincRunner
1 parent 269f41e commit d8e490a

File tree

2 files changed

+109
-3
lines changed

2 files changed

+109
-3
lines changed

src/main/scala/higherkindness/rules_scala/workers/common/AnalysisUtil.scala

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@ package workers.common
44
import java.io.File
55
import java.nio.file.{Path, Paths}
66
import sbt.internal.inc.Analysis
7-
import sbt.internal.inc.consistent.ConsistentFileAnalysisStore
87
import xsbti.compile.AnalysisStore
98
import xsbti.compile.analysis.ReadWriteMappers
109

@@ -29,13 +28,12 @@ object AnalysisUtil {
2928
ConsistentFileAnalysisStore.text(
3029
analysisStoreFile,
3130
readWriteMappers,
32-
sort = true,
3331
)
3432
} else {
3533
ConsistentFileAnalysisStore.binary(
3634
analysisStoreFile,
3735
readWriteMappers,
38-
sort = true,
36+
reproducible = true,
3937
)
4038
}
4139
}
Lines changed: 108 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,108 @@
1+
/*
2+
* Zinc - The incremental compiler for Scala.
3+
* Copyright Scala Center, Lightbend, and Mark Harrah
4+
*
5+
* Licensed under Apache License 2.0
6+
* SPDX-License-Identifier: Apache-2.0
7+
*
8+
* See the NOTICE file distributed with this work for
9+
* additional information regarding copyright ownership.
10+
*/
11+
12+
package higherkindness.rules_scala
13+
package workers.common
14+
15+
import java.io.{File, FileInputStream, FileOutputStream}
16+
import java.util.Optional
17+
import sbt.internal.inc.consistent.{ConsistentAnalysisFormat, Deserializer, ParallelGzipOutputStream, Serializer, SerializerFactory}
18+
import sbt.io.{IO, Using}
19+
import scala.jdk.OptionConverters.*
20+
import scala.util.control.Exception.allCatch
21+
import xsbti.compile.analysis.ReadWriteMappers
22+
import xsbti.compile.{AnalysisContents, AnalysisStore => XAnalysisStore}
23+
24+
/**
 * This is a modified version of Zinc's [[ConsistentFileAnalysisStore]], which you can view here:
 * [[https://github.com/sbt/zinc/blob/1.10.x/internal/zinc-persist/src/main/scala/sbt/internal/inc/consistent/ConsistentFileAnalysisStore.scala]]
 *
 * The only difference is that it doesn't write the analysis store to the temporary directory before copying it to the
 * destination. Doing so constitutes a write outside the sandbox directory provided during the work request, which
 * violates the Bazel multiplex worker protocol.
 */
object ConsistentFileAnalysisStore {

  /**
   * Creates a text-format analysis store backed by `file`.
   *
   * @param file the file the analysis is read from and written to
   * @param mappers mappers applied when reading/writing paths in the analysis
   * @param reproducible when true, the format writes deterministic output
   * @param parallelism number of threads used for parallel gzip compression
   */
  def text(
    file: File,
    mappers: ReadWriteMappers,
    reproducible: Boolean = true,
    parallelism: Int = Runtime.getRuntime.availableProcessors(),
  ): XAnalysisStore =
    new AStore(
      file,
      new ConsistentAnalysisFormat(mappers, reproducible),
      SerializerFactory.text,
      parallelism,
    )

  /** Creates a binary-format analysis store with empty mappers and reproducible output. */
  def binary(file: File): XAnalysisStore =
    binary(
      file,
      mappers = ReadWriteMappers.getEmptyMappers(),
      reproducible = true,
    )

  /** Creates a binary-format analysis store with the given mappers and reproducible output. */
  def binary(
    file: File,
    mappers: ReadWriteMappers,
  ): XAnalysisStore =
    binary(
      file,
      mappers,
      reproducible = true,
    )

  /**
   * Creates a binary-format analysis store backed by `file`.
   *
   * @param file the file the analysis is read from and written to
   * @param mappers mappers applied when reading/writing paths in the analysis
   * @param reproducible when true, the format writes deterministic output
   * @param parallelism number of threads used for parallel gzip compression
   */
  def binary(
    file: File,
    mappers: ReadWriteMappers,
    reproducible: Boolean,
    parallelism: Int = Runtime.getRuntime.availableProcessors(),
  ): XAnalysisStore =
    new AStore(
      file,
      new ConsistentAnalysisFormat(mappers, reproducible),
      SerializerFactory.binary,
      parallelism,
    )

  /**
   * Analysis store implementation that serializes directly to the destination file, with no
   * intermediate temp-directory write (see the object-level Scaladoc for why).
   */
  private final class AStore[S <: Serializer, D <: Deserializer](
    file: File,
    format: ConsistentAnalysisFormat,
    sf: SerializerFactory[S, D],
    parallelism: Int = Runtime.getRuntime.availableProcessors(),
  ) extends XAnalysisStore {

    /** Serializes `analysisContents` (analysis + mini setup) to `file`, gzip-compressed. */
    def set(analysisContents: AnalysisContents): Unit = {
      val analysis = analysisContents.getAnalysis
      val setup = analysisContents.getMiniSetup
      if (!file.getParentFile.exists()) file.getParentFile.mkdirs()
      val fout = new FileOutputStream(file)
      try {
        val gout = new ParallelGzipOutputStream(fout, parallelism)
        val ser = sf.serializerFor(gout)
        format.write(ser, analysis, setup)
        // Close the gzip stream on the success path so the trailer is flushed; the underlying
        // file stream is closed unconditionally below.
        gout.close()
      } finally fout.close() // side-effecting 0-arity call: keep the parentheses
    }

    /** Reads the analysis from `file`; any read/parse failure yields an empty Optional. */
    def get(): Optional[AnalysisContents] = {
      import sbt.internal.inc.JavaInterfaceUtil.EnrichOption
      allCatch.opt(unsafeGet()).toJava
    }

    /** Reads the analysis from `file`, propagating any exception to the caller. */
    def unsafeGet(): AnalysisContents =
      Using.gzipInputStream(new FileInputStream(file)) { in =>
        val deser = sf.deserializerFor(in)
        val (analysis, setup) = format.read(deser)
        AnalysisContents.create(analysis, setup)
      }
  }
}

0 commit comments

Comments
 (0)