@@ -29,13 +29,30 @@ def __init__(self, args: list[str]) -> None:
2929 self .args = args
3030
3131
32+ class ArtifactFetch :
33+ """Configuration for fetching an artifact with optional classifiers."""
34+
35+ def __init__ (self , coord : str , classifiers : list [str ] | None = None ) -> None :
36+ if not coord :
37+ raise ValueError ("ArtifactFetch requires a coord" )
38+ self .coord = coord
39+ self .classifiers = classifiers or []
40+
41+
3242class Config :
3343 """Configuration for lockfile generation."""
3444
35- def __init__ (self , sbt_runs : list [SbtRun ]) -> None :
45+ def __init__ (
46+ self ,
47+ sbt_runs : list [SbtRun ],
48+ shell_commands : list [list [str ]] | None = None ,
49+ fetch_artifacts : list [ArtifactFetch ] | None = None ,
50+ ) -> None :
3651 if not sbt_runs :
3752 raise ValueError ("Config requires at least one sbt_runs entry" )
3853 self .sbt_runs = sbt_runs
54+ self .shell_commands = shell_commands or []
55+ self .fetch_artifacts = fetch_artifacts or []
3956
4057 @staticmethod
4158 def load (config_path : Path ) -> "Config" :
@@ -60,7 +77,30 @@ def load(config_path: Path) -> "Config":
6077
6178 sbt_runs .append (SbtRun (run_data ["args" ]))
6279
63- return Config (sbt_runs )
80+ shell_commands = []
81+ if "shell_commands" in data :
82+ if not isinstance (data ["shell_commands" ], list ):
83+ raise ValueError ("'shell_commands' must be an array" )
84+ for i , cmd in enumerate (data ["shell_commands" ]):
85+ if not isinstance (cmd , list ):
86+ raise ValueError (f"shell_commands[{ i } ] must be an array of strings" )
87+ shell_commands .append (cmd )
88+
89+ fetch_artifacts = []
90+ if "fetch_artifacts" in data :
91+ if not isinstance (data ["fetch_artifacts" ], list ):
92+ raise ValueError ("'fetch_artifacts' must be an array" )
93+ for i , fetch_data in enumerate (data ["fetch_artifacts" ]):
94+ if not isinstance (fetch_data , dict ):
95+ raise ValueError (f"fetch_artifacts[{ i } ] must be an object" )
96+ if "coord" not in fetch_data :
97+ raise ValueError (f"fetch_artifacts[{ i } ] must contain 'coord' string" )
98+ classifiers = fetch_data .get ("classifiers" , [])
99+ if not isinstance (classifiers , list ):
100+ raise ValueError (f"fetch_artifacts[{ i } ].classifiers must be an array" )
101+ fetch_artifacts .append (ArtifactFetch (fetch_data ["coord" ], classifiers ))
102+
103+ return Config (sbt_runs , shell_commands , fetch_artifacts )
64104
65105
66106def log (message : str ) -> None :
@@ -194,6 +234,47 @@ def fetch_bridge_sources(cache_dir: Path, bridges: list[tuple[str, str]], env: d
194234 log (f" { line } " )
195235
196236
def fetch_configured_artifacts(
    cache_dir: Path, fetch_artifacts: list[ArtifactFetch], env: dict
) -> None:
    """Fetch explicitly configured artifacts with optional classifiers.

    Runs ``cs fetch`` once per artifact (main jar plus transitive deps) and
    once per configured classifier (e.g. "sources", "javadoc"), populating
    ``cache_dir`` as the coursier cache. Failures are logged as warnings
    rather than raised, since these fetches are best-effort cache priming.

    Args:
        cache_dir: Directory used as COURSIER_CACHE for the fetches.
        fetch_artifacts: Artifacts to fetch; no-op when empty.
        env: Base environment for the subprocesses.
    """
    if not fetch_artifacts:
        return

    log("=== Fetching configured artifacts ===")

    # The subprocess environment is loop-invariant; the original rebuilt
    # this dict for every single `cs fetch` call. Build it once.
    fetch_env = {**env, "COURSIER_CACHE": str(cache_dir)}

    for artifact in fetch_artifacts:
        log(f"  Fetching {artifact.coord}")

        # Fetch main artifact and transitive dependencies.
        result = subprocess.run(
            ["cs", "fetch", artifact.coord],
            env=fetch_env,
            capture_output=True,
            text=True,
        )
        if result.returncode != 0:
            log(f"  Warning: Failed to fetch {artifact.coord}: {result.stderr}")

        # Fetch each classifier (e.g., sources, javadoc).
        for classifier in artifact.classifiers:
            log(f"    Fetching classifier: {classifier}")
            result = subprocess.run(
                ["cs", "fetch", f"--classifier={classifier}", artifact.coord],
                env=fetch_env,
                capture_output=True,
                text=True,
            )
            if result.returncode != 0:
                log(f"    Warning: Failed to fetch {classifier} for {artifact.coord}: {result.stderr}")
            else:
                # On success, echo the resolved file paths coursier printed.
                for line in result.stdout.strip().split("\n"):
                    if line:
                        log(f"      {line}")
277+
197278def path_to_url (path : Path , cache_dir : Path ) -> str :
198279 """Convert cache path to URL."""
199280 # Path structure: cache_dir/[cache/]https/repo.example.com/path/to/artifact
@@ -241,14 +322,20 @@ def _generate_lockfile_impl(project_dir: Path, config: Config, temp_home: Path)
241322 sbt_global .mkdir (parents = True )
242323 sbt_boot .mkdir (parents = True )
243324
244- # Environment for sbt
325+ # Environment for sbt and other tools
245326 env = os .environ .copy ()
246327 env ["HOME" ] = str (temp_home )
247328 env ["COURSIER_CACHE" ] = str (coursier_cache )
248329 env ["SBT_GLOBAL_BASE" ] = str (sbt_global )
249330 env ["SBT_BOOT_DIRECTORY" ] = str (sbt_boot )
250331 env ["SBT_OPTS" ] = f"-Dsbt.boot.directory={ sbt_boot } -Dsbt.coursier.home={ coursier_cache } "
251332
333+ # Override user.home for all JVM processes (ammonite, etc.)
334+ java_opts = f"-Duser.home={ temp_home } "
335+ if "_JAVA_OPTIONS" in env :
336+ java_opts = f"{ env ['_JAVA_OPTIONS' ]} { java_opts } "
337+ env ["_JAVA_OPTIONS" ] = java_opts
338+
252339 log ("=== Phase 1: Populating caches ===" )
253340 log (f"Home: { temp_home } " )
254341
@@ -261,6 +348,22 @@ def _generate_lockfile_impl(project_dir: Path, config: Config, temp_home: Path)
261348 if project_target .exists ():
262349 shutil .rmtree (project_target )
263350
351+ # Run shell commands before sbt (e.g., code generators)
352+ for i , cmd in enumerate (config .shell_commands , 1 ):
353+ # Expand environment variables in command arguments
354+ expanded_cmd = [os .path .expandvars (arg .replace ("$HOME" , env ["HOME" ])) for arg in cmd ]
355+ log (f"Running shell command ({ i } /{ len (config .shell_commands )} ): { ' ' .join (expanded_cmd )} " )
356+ result = subprocess .run (
357+ expanded_cmd ,
358+ env = env ,
359+ cwd = project_dir ,
360+ capture_output = True ,
361+ text = True ,
362+ )
363+ if result .returncode != 0 :
364+ log (f"Shell command failed:\n { result .stdout } \n { result .stderr } " )
365+ raise RuntimeError (f"Shell command failed: { ' ' .join (expanded_cmd )} " )
366+
264367 # Run sbt commands from config
265368 for i , sbt_run in enumerate (config .sbt_runs , 1 ):
266369 cmd = ["sbt" , "--batch" ] + sbt_run .args
@@ -285,6 +388,9 @@ def _generate_lockfile_impl(project_dir: Path, config: Config, temp_home: Path)
285388 if bridges :
286389 fetch_bridge_sources (coursier_cache , bridges , env )
287390
391+ # Fetch any explicitly configured artifacts
392+ fetch_configured_artifacts (coursier_cache , config .fetch_artifacts , env )
393+
288394 log ("=== Phase 2: Generating lockfile ===" )
289395
290396 # Assert no Ivy artifacts (modern sbt uses Coursier only)
0 commit comments