Add dependency management functionality and setup command

main
Joca 2025-10-10 22:56:59 -03:00
parent 0a94753748
commit 0783e68b0a
Signed by: jocadbz
GPG Key ID: B1836DCE2F50BDF7
4 changed files with 260 additions and 6 deletions

17
DOCS.md
View File

@ -149,6 +149,23 @@ lana --help
```
Shows commands, options, and config examples.
### Setup (dependencies)
```bash
lana setup
```
Fetches and extracts external dependencies declared in `config.ini` under `[dependencies]` sections. Each dependency supports the following keys:
- `name` - logical name for the dependency
- `url` - download URL or git repository
- `archive` - optional filename to save the downloaded archive under `dependencies/tmp`
- `checksum` - optional sha256 checksum to verify the archive
- `extract_to` - directory under `dependencies/` where files should be extracted or cloned
Notes:
- If `url` points to a git repository (ends with `.git`), `lana setup` will perform a shallow clone into `dependencies/<extract_to>`.
- For archive URLs `lana setup` will try `curl` then `wget` to download, will verify checksum if provided, and will extract common archive types (`.tar.gz`, `.tar.xz`, `.zip`).
- The current implementation performs a best-effort download/extract and prints warnings/errors; it is intentionally simple and can be extended or replaced by a more robust script if needed.
## Configuration
`config.ini` handles **global** settings (overridden by directives for per-file needs). Edit it in your project root.

View File

@ -47,6 +47,17 @@ __global:
verbose bool
}
// Dependency represents an external dependency to download/extract.
// Populated from one `[dependencies]` section of config.ini; consumed by
// deps.fetch_dependencies during `lana setup`.
pub struct Dependency {
__global:
	name string // logical name; also the default extraction dir under dependencies/ when extract_to is unset
	url string // download URL, or a git repository (`.git` suffix / `git://` scheme triggers a shallow clone)
	archive string // relative archive path to save (under dependencies/tmp)
	checksum string // optional checksum to verify -- compared against sha256sum output
	extract_to string // destination directory under dependencies/
	build_cmds []string // build commands, already split from the semicolon-separated `build_cmds` config value
}
// BuildConfig holds the configuration for the project
pub struct BuildConfig {
__global:
@ -70,6 +81,7 @@ __global:
shaders_dir string = 'bin/shaders' // for shader compilation
dependencies_dir string = 'dependencies' // external dependencies
parallel_compilation bool = true // enable parallel builds
dependencies []Dependency
// Build directives from source files
build_directives []BuildDirective
@ -90,6 +102,7 @@ pub const default_config = BuildConfig{
verbose: false
shared_libs: []
tools: []
dependencies: []
}
// Parse build directives from source files
@ -194,6 +207,11 @@ pub fn (mut build_config BuildConfig) parse_build_directives() ! {
pub fn parse_args() !BuildConfig {
mut build_config := default_config
// Auto-load config.ini if present in current directory
if os.is_file('config.ini') {
build_config = parse_config_file('config.ini') or { build_config }
}
mut i := 2 // Skip program name and command
for i < os.args.len {
@ -319,17 +337,21 @@ pub fn parse_config_file(filename string) !BuildConfig {
mut current_section := ''
mut current_lib_index := 0
mut current_tool_index := 0
mut current_dep_index := 0
for line in lines {
if line.starts_with('#') || line.trim_space() == '' { continue }
if line.starts_with('[') && line.ends_with(']') {
current_section = line[1..line.len - 1]
// Reset indices when entering new sections
// keep the brackets in current_section to match existing match arms
current_section = '[' + line[1..line.len - 1] + ']'
// Point indices to the next entry index for repeated sections
if current_section == '[shared_libs]' {
current_lib_index = 0
current_lib_index = build_config.shared_libs.len
} else if current_section == '[tools]' {
current_tool_index = 0
current_tool_index = build_config.tools.len
} else if current_section == '[dependencies]' {
current_dep_index = build_config.dependencies.len
}
continue
}
@ -425,7 +447,7 @@ pub fn parse_config_file(filename string) !BuildConfig {
}
}
}
current_lib_index++
// keys for this shared_lib section are populated into the same struct
}
'[tools]' {
// Ensure we have a tool config to modify
@ -476,7 +498,34 @@ pub fn parse_config_file(filename string) !BuildConfig {
}
}
}
current_tool_index++
// keys for this tool section are populated into the same struct
}
'[dependencies]' {
// Ensure we have a dependency entry to modify
if current_dep_index >= build_config.dependencies.len {
build_config.dependencies << Dependency{}
}
mut dep := &build_config.dependencies[current_dep_index]
match key {
'name' { dep.name = value }
'url' { dep.url = value }
'archive' { dep.archive = value }
'checksum' { dep.checksum = value }
'build_cmds' {
cmds := value.split(';')
for c in cmds {
dep.build_cmds << c.trim_space()
}
}
'extract_to' { dep.extract_to = value }
else {
if build_config.verbose {
println('Warning: Unknown dependency config key: ${key}')
}
}
}
// keys for this dependency section are populated into the same struct
}
else {
if build_config.verbose {

181
deps/deps.v vendored
View File

@ -1,6 +1,8 @@
module deps
import os
import config
import os.cmdline
pub fn extract_dependencies(source_file string) ![]string {
mut dependencies := []string{}
@ -68,4 +70,183 @@ pub fn generate_dependency_file(source_file string, object_file string, dep_file
}
os.write_file(dep_file, content) or { }
}
// Fetch and extract dependencies declared in the build config.
//
// For each entry in `build_config.dependencies`:
//   - git URLs (`.git` suffix or `git://` scheme) are shallow-cloned into
//     `dependencies/<extract_to>` (falling back to `dependencies/<name>`);
//   - archive URLs are downloaded into `dependencies/tmp` (curl first, wget
//     as fallback), optionally verified against a sha256 checksum, and
//     extracted (tarballs via tar, `.zip` via unzip);
//   - declared `build_cmds` are run inside the extraction directory;
//     otherwise built-in defaults handle a few known dependencies
//     (zlib, sockpp, shaderc).
//
// Returns an error on the first failed download, checksum mismatch,
// extraction or build step. `dependencies/tmp` is removed only on success,
// so partially-downloaded archives can be inspected after a failure.
pub fn fetch_dependencies(build_config config.BuildConfig) ! {
	if build_config.dependencies.len == 0 {
		println('No dependencies declared in config')
		return
	}
	tmp_dir := os.join_path(build_config.dependencies_dir, 'tmp')
	os.mkdir_all(tmp_dir) or { return error('Failed to create tmp dir: ${err}') }
	deps_dir := build_config.dependencies_dir
	os.mkdir_all(deps_dir) or { return error('Failed to create dependencies dir: ${err}') }
	for dep in build_config.dependencies {
		if dep.name == '' {
			println('Skipping unnamed dependency')
			continue
		}
		println('Processing dependency: ${dep.name}')
		println('  parsed: url="${dep.url}", archive="${dep.archive}", extract_to="${dep.extract_to}"')
		if dep.url.trim_space() == '' {
			return error('Dependency ${dep.name} has empty url in config')
		}
		// Decide if URL is a git repo or an archive
		is_git := dep.url.ends_with('.git') || dep.url.starts_with('git://')
		extract_to := if dep.extract_to != '' { os.join_path(deps_dir, dep.extract_to) } else { os.join_path(deps_dir, dep.name) }
		if is_git {
			// Clone repository
			if os.is_dir(extract_to) {
				println('Dependency already cloned at ${extract_to}, skipping')
				continue
			}
			// Quote URL and destination so paths containing spaces (or other
			// shell metacharacters from config.ini) survive the shell.
			cmd := 'git clone --depth 1 "${dep.url}" "${extract_to}"'
			println('Running: ${cmd}')
			res := os.execute(cmd)
			if res.exit_code != 0 {
				return error('Failed to clone ${dep.url}: ${res.output}')
			}
			continue
		}
		// Archive download path
		archive_name := if dep.archive != '' { dep.archive } else { os.file_name(dep.url) }
		archive_path := os.join_path(tmp_dir, archive_name)
		if !os.is_file(archive_path) {
			println('Downloading ${dep.url} -> ${archive_path}')
			// Prefer curl, fall back to wget
			mut res := os.execute('curl -L -o "${archive_path}" "${dep.url}"')
			if res.exit_code != 0 {
				res = os.execute('wget -O "${archive_path}" "${dep.url}"')
				if res.exit_code != 0 {
					return error('Failed to download ${dep.url}: ${res.output}')
				}
			}
		} else {
			println('Archive already exists: ${archive_path}')
		}
		// Optionally verify checksum
		if dep.checksum != '' {
			// Use sha256sum if available
			res := os.execute('sha256sum "${archive_path}"')
			if res.exit_code != 0 {
				println('Warning: sha256sum not available to verify checksum')
			} else {
				// sha256sum prints '<hash>  <file>'; the hash is the first field.
				parts := res.output.split(' ')
				if parts.len > 0 && parts[0].trim_space() != dep.checksum {
					// Delete the corrupt download, otherwise every later run
					// would skip the download (file exists) and fail on the
					// same cached archive forever.
					os.rm(archive_path) or {}
					return error('Checksum mismatch for ${archive_path}')
				}
			}
		}
		// Extract archive
		if os.is_dir(extract_to) {
			println('Already extracted to ${extract_to}, skipping')
			continue
		}
		os.mkdir_all(extract_to) or { return error('Failed to create ${extract_to}: ${err}') }
		// Basic extraction handling by extension
		lower := archive_path.to_lower()
		if lower.ends_with('.tar.gz') || lower.ends_with('.tgz') || lower.ends_with('.tar.xz') || lower.ends_with('.tar') {
			// NOTE: tarballs are extracted into deps_dir, not extract_to; the
			// caller is expected to set extract_to to match the archive's
			// top-level directory (documented best-effort behaviour).
			cmd := 'tar -xf "${archive_path}" -C "${deps_dir}"'
			println('Extracting with: ${cmd}')
			res := os.execute(cmd)
			if res.exit_code != 0 {
				return error('Failed to extract ${archive_path}: ${res.output}')
			}
		} else if lower.ends_with('.zip') {
			cmd := 'unzip -q "${archive_path}" -d "${extract_to}"'
			println('Extracting zip with: ${cmd}')
			res := os.execute(cmd)
			if res.exit_code != 0 {
				return error('Failed to unzip ${archive_path}: ${res.output}')
			}
		} else {
			println('Unknown archive format for ${archive_path}, skipping extraction')
		}
		// Run build commands if provided, otherwise run package-specific defaults
		if dep.build_cmds.len > 0 {
			for cmd_line in dep.build_cmds {
				println('Running build command for ${dep.name}: ${cmd_line}')
				// run in extract_to, restoring the old cwd afterwards
				old_cwd := os.getwd()
				os.chdir(extract_to) or { return error('Failed to chdir: ${err}') }
				res := os.execute(cmd_line)
				os.chdir(old_cwd) or {}
				if res.exit_code != 0 {
					return error('Build command failed for ${dep.name}: ${res.output}')
				}
			}
		} else {
			// default build steps for known dependencies
			match dep.name {
				'zlib' {
					println('Building zlib...')
					old_cwd := os.getwd()
					os.chdir(extract_to) or { return error('Failed to chdir: ${err}') }
					mut res := os.execute('./configure')
					if res.exit_code != 0 {
						os.chdir(old_cwd) or {}
						return error('zlib configure failed: ${res.output}')
					}
					res = os.execute('make')
					os.chdir(old_cwd) or {}
					if res.exit_code != 0 {
						return error('zlib make failed: ${res.output}')
					}
				}
				'sockpp' {
					println('Building sockpp...')
					// Try cmake build in project dir (common layout)
					build_dir := os.join_path(extract_to, 'build')
					os.mkdir_all(build_dir) or { return error('Failed to create build dir: ${err}') }
					old_cwd := os.getwd()
					os.chdir(extract_to) or { return error('Failed to chdir: ${err}') }
					mut res := os.execute('cmake -Bbuild .')
					if res.exit_code != 0 {
						os.chdir(old_cwd) or {}
						return error('sockpp cmake failed: ${res.output}')
					}
					res = os.execute('cmake --build build')
					os.chdir(old_cwd) or {}
					if res.exit_code != 0 {
						return error('sockpp build failed: ${res.output}')
					}
				}
				'shaderc' {
					println('Building shaderc (invoke update script + ninja)')
					old_cwd := os.getwd()
					os.chdir(extract_to) or { return error('Failed to chdir: ${err}') }
					mut res := os.execute('./update_shaderc_sources.py')
					if res.exit_code != 0 {
						os.chdir(old_cwd) or {}
						return error('shaderc update failed: ${res.output}')
					}
					// Fixed build dir name. The previous 'build-$(date +%s)' was
					// never shell-expanded (os.mkdir_all is a direct syscall),
					// so it created a directory literally named 'build-$(date +%s)'.
					build_dir := 'build'
					os.mkdir_all(build_dir) or {
						os.chdir(old_cwd) or {}
						return error('Failed to create shaderc build dir')
					}
					os.chdir(build_dir) or {
						os.chdir(old_cwd) or {}
						return error('Failed to chdir to shaderc build dir')
					}
					res = os.execute('cmake -GNinja -DCMAKE_BUILD_TYPE=Release ../src/')
					if res.exit_code != 0 {
						os.chdir(old_cwd) or {}
						return error('shaderc cmake failed: ${res.output}')
					}
					res = os.execute('ninja')
					os.chdir(old_cwd) or {}
					if res.exit_code != 0 {
						return error('shaderc ninja failed: ${res.output}')
					}
					// attempt to copy glslc to <extract_to>/bin (best-effort).
					// Use the resolved extract_to so the name-based fallback
					// applies; joining raw dep.extract_to is wrong when unset.
					glslc_path := os.join_path(extract_to, build_dir, 'glslc', 'glslc')
					out_dir := extract_to
					os.mkdir_all(os.join_path(out_dir, 'bin')) or {}
					if os.is_file(glslc_path) {
						os.cp(glslc_path, os.join_path(out_dir, 'bin', 'glslc')) or { println('Warning: failed to copy glslc: ${err}') }
					}
				}
				else {}
			}
		}
	}
	println('Dependencies processed successfully')
	// Clean up temporary download directory (success path only; kept on error
	// so failed downloads can be inspected).
	if os.is_dir(tmp_dir) {
		os.rmdir_all(tmp_dir) or { println('Warning: Failed to remove tmp dir: ${err}') }
	}
}

7
lana.v
View File

@ -5,6 +5,7 @@ import config
import builder
import runner
import initializer
import deps
import help
const (
@ -45,6 +46,12 @@ fn main() {
}
}
'init' { initializer.init_project(os.args[2] or { 'myproject' }) }
'setup' {
deps.fetch_dependencies(config_data) or {
eprintln('Failed to fetch dependencies: ${err}')
exit(1)
}
}
else { help.show_help() }
}
}