Skip to content

Commit 2d57b46

Browse files
committed
fix: fetch compiler-bridge sources for offline builds
sbt compiles the compiler-bridge from sources but doesn't persist the sources jar in the coursier cache. This causes offline builds to fail when there are .scala files in project/ that require bridge compilation. After sbt runs, detect which compiler-bridge versions were downloaded and explicitly fetch both sources AND their transitive dependencies using coursier CLI.
1 parent 65f6702 commit 2d57b46

File tree

1 file changed

+77
-0
lines changed

1 file changed

+77
-0
lines changed

scripts/generate-lockfile.py

Lines changed: 77 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -131,6 +131,69 @@ def assert_no_ivy_artifacts(ivy_cache: Path) -> None:
131131
)
132132

133133

134+
def find_compiler_bridges(cache_dir: Path) -> list[tuple[str, str]]:
    """Find compiler-bridge artifacts in the coursier cache.

    Scans the Maven-layout cache that sbt populates and returns one
    entry per cached compiler-bridge version.

    Args:
        cache_dir: Root of the coursier cache directory.

    Returns:
        Sorted list of (scala_version, bridge_version) tuples; empty if
        the expected cache layout is not present.
    """
    # sbt stores artifacts in cache_dir/cache/https/repo1.maven.org/maven2/org/scala-sbt/
    bridge_base = (
        cache_dir / "cache" / "https" / "repo1.maven.org" / "maven2" / "org" / "scala-sbt"
    )

    if not bridge_base.exists():
        return []

    bridges = []
    for bridge_dir in bridge_base.glob("compiler-bridge_*"):
        # The directory name encodes the Scala binary version,
        # e.g. "compiler-bridge_2.13" -> "2.13". removeprefix (3.9+)
        # strips exactly the leading token, unlike replace().
        scala_ver = bridge_dir.name.removeprefix("compiler-bridge_")
        for version_dir in bridge_dir.iterdir():
            # Each subdirectory is one published bridge version; skip
            # stray files (checksums, metadata).
            if version_dir.is_dir():
                bridges.append((scala_ver, version_dir.name))

    # Sort so callers and logs see a stable order regardless of
    # filesystem iteration order.
    return sorted(bridges)
151+
152+
153+
def fetch_bridge_sources(cache_dir: Path, bridges: list[tuple[str, str]], env: dict) -> None:
    """Fetch compiler-bridge sources and dependencies using coursier CLI.

    sbt compiles the compiler-bridge from sources but doesn't cache the sources jar
    in the coursier cache. We need to explicitly fetch them for offline builds.
    We also fetch main artifacts since sources have transitive dependencies.

    Failures are logged as warnings rather than raised: the fetch is
    best-effort, and a partially populated cache is still useful. In
    particular, a missing `cs` binary no longer crashes lockfile
    generation with FileNotFoundError.

    Args:
        cache_dir: Coursier cache directory to populate (COURSIER_CACHE).
        bridges: (scala_version, bridge_version) tuples to fetch.
        env: Base environment to pass to the coursier subprocess.
    """
    if not bridges:
        return

    log("=== Fetching compiler-bridge sources ===")

    # Point coursier at our cache; build the env once instead of per call.
    cs_env = {**env, "COURSIER_CACHE": str(cache_dir)}

    def run_cs(args):
        # Run the coursier CLI; return None if the binary is missing.
        try:
            return subprocess.run(args, env=cs_env, capture_output=True, text=True)
        except FileNotFoundError:
            log("  Warning: coursier CLI (`cs`) not found on PATH")
            return None

    for scala_ver, bridge_ver in bridges:
        coord = f"org.scala-sbt:compiler-bridge_{scala_ver}:{bridge_ver}"
        log(f"  Fetching sources and deps for {coord}")

        # First fetch main artifacts (transitive dependencies)
        result = run_cs(["cs", "fetch", coord])
        if result is None:
            return  # without the CLI, retrying per bridge is pointless
        if result.returncode != 0:
            log(f"  Warning: Failed to fetch deps for {coord}: {result.stderr}")

        # Then fetch sources
        result = run_cs(["cs", "fetch", "--sources", coord])
        if result is None:
            return
        if result.returncode != 0:
            log(f"  Warning: Failed to fetch sources for {coord}: {result.stderr}")
        else:
            # Log the fetched source files
            for line in result.stdout.strip().split('\n'):
                if line and 'sources' in line:
                    log(f"    {line}")
195+
196+
134197
def path_to_url(path: Path, cache_dir: Path) -> str:
135198
"""Convert cache path to URL."""
136199
# Path structure: cache_dir/[cache/]https/repo.example.com/path/to/artifact
@@ -217,6 +280,11 @@ def _generate_lockfile_impl(project_dir: Path, config: Config, temp_home: Path)
217280
log(f"sbt failed:\n{result.stdout}\n{result.stderr}")
218281
raise RuntimeError(f"sbt command failed: {' '.join(cmd)}")
219282

283+
# Fetch compiler-bridge sources (sbt compiles these but doesn't cache the sources)
284+
bridges = find_compiler_bridges(coursier_cache)
285+
if bridges:
286+
fetch_bridge_sources(coursier_cache, bridges, env)
287+
220288
log("=== Phase 2: Generating lockfile ===")
221289

222290
# Assert no Ivy artifacts (modern sbt uses Coursier only)
@@ -243,6 +311,15 @@ def _generate_lockfile_impl(project_dir: Path, config: Config, temp_home: Path)
243311
if i % 100 == 0:
244312
log(f" Processed {i} artifacts...")
245313

314+
# Deduplicate entries (cs fetch and sbt may cache to different paths)
315+
seen_urls = set()
316+
unique_entries = []
317+
for entry in entries:
318+
if entry["url"] not in seen_urls:
319+
seen_urls.add(entry["url"])
320+
unique_entries.append(entry)
321+
entries = unique_entries
322+
246323
# Sort entries by URL for deterministic output
247324
entries.sort(key=lambda e: e["url"])
248325

0 commit comments

Comments
 (0)