diff --git a/cli/Günter Kunert_Aufklärung_Text.pdf b/cli/Günter Kunert_Aufklärung_Text.pdf new file mode 100644 index 0000000..f41b6c2 Binary files /dev/null and b/cli/Günter Kunert_Aufklärung_Text.pdf differ diff --git a/cli/cmd/root.go b/cli/cmd/root.go new file mode 100644 index 0000000..1c58057 --- /dev/null +++ b/cli/cmd/root.go @@ -0,0 +1,165 @@ +package cmd + +import ( + "fmt" + "os" + "path/filepath" + + tea "github.com/charmbracelet/bubbletea" + + "github.com/noauf/transmute-cli/internal/ffmpeg" + "github.com/noauf/transmute-cli/internal/tui" + "github.com/noauf/transmute-cli/internal/update" +) + +// Execute runs the CLI. +func Execute() { + args := os.Args[1:] + + // Handle flags + var outputDir string + var installFFmpeg bool + var doUpdate bool + var paths []string + + for i := 0; i < len(args); i++ { + switch args[i] { + case "-d", "--output-dir": + if i+1 < len(args) { + outputDir = args[i+1] + i++ + } else { + fmt.Fprintln(os.Stderr, "Error: -d requires a directory argument") + os.Exit(1) + } + case "--install-ffmpeg": + installFFmpeg = true + case "--update": + doUpdate = true + case "-h", "--help": + printUsage() + os.Exit(0) + case "-v", "--version": + fmt.Printf("transmute v%s\n", update.CurrentVersion) + os.Exit(0) + default: + if args[i][0] == '-' { + fmt.Fprintf(os.Stderr, "Unknown flag: %s\n", args[i]) + printUsage() + os.Exit(1) + } + paths = append(paths, args[i]) + } + } + + // Handle --install-ffmpeg + if installFFmpeg { + handleInstallFFmpeg() + return + } + + // Handle --update + if doUpdate { + handleUpdate() + return + } + + // If output dir specified, ensure it exists + if outputDir != "" { + if err := os.MkdirAll(outputDir, 0o755); err != nil { + fmt.Fprintf(os.Stderr, "Error creating output directory: %v\n", err) + os.Exit(1) + } + } + + // Expand glob patterns + expandedPaths := expandGlobs(paths) + + // If no paths given, use current directory + if len(expandedPaths) == 0 { + // Launch TUI with empty state — user can see 
instructions + expandedPaths = []string{"."} + } + + // Create and run TUI + model := tui.New(expandedPaths, outputDir) + p := tea.NewProgram(model, tea.WithAltScreen()) + if _, err := p.Run(); err != nil { + fmt.Fprintf(os.Stderr, "Error: %v\n", err) + os.Exit(1) + } +} + +func expandGlobs(patterns []string) []string { + var result []string + for _, pattern := range patterns { + matches, err := filepath.Glob(pattern) + if err != nil { + // Not a glob, treat as literal path + result = append(result, pattern) + continue + } + if len(matches) == 0 { + // No matches, keep original (might be a direct path) + result = append(result, pattern) + } else { + result = append(result, matches...) + } + } + return result +} + +func handleInstallFFmpeg() { + if ffmpeg.IsAvailable() { + path, _ := ffmpeg.Resolve() + fmt.Printf("ffmpeg is already available at: %s\n", path) + return + } + + fmt.Println("Downloading ffmpeg...") + err := ffmpeg.Download(func(downloaded int64) { + fmt.Printf("\r Downloaded %.1f MB", float64(downloaded)/1024/1024) + }) + if err != nil { + fmt.Fprintf(os.Stderr, "\nError downloading ffmpeg: %v\n", err) + os.Exit(1) + } + fmt.Println("\n ffmpeg installed successfully!") +} + +func handleUpdate() { + err := update.Run(func(msg string) { + fmt.Println(msg) + }) + if err != nil { + fmt.Fprintf(os.Stderr, "Update failed: %v\n", err) + os.Exit(1) + } +} + +func printUsage() { + fmt.Println(`transmute - Universal file converter + +Usage: + transmute [files...] 
Convert files interactively + transmute *.png Convert all PNG files + transmute ./photos/ Convert all files in directory + transmute file.csv -d ./output/ Output to specific directory + +Flags: + -d, --output-dir Output converted files to this directory + --install-ffmpeg Download and install ffmpeg for audio/video + --update Update transmute to the latest version + -h, --help Show this help + -v, --version Show version + +Interactive Controls: + ↑/↓ or j/k Navigate files + ←/→ or h/l Change target format + space Toggle file selection + a Select / deselect all + c or enter Start conversion + d Remove file from list + ? Toggle help + q or ctrl+c Quit`) +} diff --git a/cli/go.mod b/cli/go.mod new file mode 100644 index 0000000..00f13e7 --- /dev/null +++ b/cli/go.mod @@ -0,0 +1,48 @@ +module github.com/noauf/transmute-cli + +go 1.25.0 + +require ( + charm.land/lipgloss/v2 v2.0.1 + github.com/BurntSushi/toml v1.6.0 + github.com/charmbracelet/bubbles v1.0.0 + github.com/charmbracelet/bubbletea v1.3.10 + github.com/russross/blackfriday/v2 v2.1.0 + github.com/xuri/excelize/v2 v2.10.1 + golang.org/x/image v0.36.0 + gopkg.in/yaml.v3 v3.0.1 +) + +require ( + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect + github.com/charmbracelet/colorprofile v0.4.2 // indirect + github.com/charmbracelet/lipgloss v1.1.0 // indirect + github.com/charmbracelet/ultraviolet v0.0.0-20251205161215-1948445e3318 // indirect + github.com/charmbracelet/x/ansi v0.11.6 // indirect + github.com/charmbracelet/x/cellbuf v0.0.15 // indirect + github.com/charmbracelet/x/term v0.2.2 // indirect + github.com/charmbracelet/x/termios v0.1.1 // indirect + github.com/charmbracelet/x/windows v0.2.2 // indirect + github.com/clipperhouse/displaywidth v0.11.0 // indirect + github.com/clipperhouse/uax29/v2 v2.7.0 // indirect + github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect + github.com/lucasb-eyer/go-colorful v1.3.0 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + 
github.com/mattn/go-localereader v0.0.1 // indirect + github.com/mattn/go-runewidth v0.0.19 // indirect + github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect + github.com/muesli/cancelreader v0.2.2 // indirect + github.com/muesli/termenv v0.16.0 // indirect + github.com/richardlehane/mscfb v1.0.6 // indirect + github.com/richardlehane/msoleps v1.0.6 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/tiendc/go-deepcopy v1.7.2 // indirect + github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect + github.com/xuri/efp v0.0.1 // indirect + github.com/xuri/nfp v0.0.2-0.20250530014748-2ddeb826f9a9 // indirect + golang.org/x/crypto v0.48.0 // indirect + golang.org/x/net v0.50.0 // indirect + golang.org/x/sync v0.19.0 // indirect + golang.org/x/sys v0.42.0 // indirect + golang.org/x/text v0.34.0 // indirect +) diff --git a/cli/go.sum b/cli/go.sum new file mode 100644 index 0000000..8cc94b4 --- /dev/null +++ b/cli/go.sum @@ -0,0 +1,90 @@ +charm.land/lipgloss/v2 v2.0.1 h1:6Xzrn49+Py1Um5q/wZG1gWgER2+7dUyZ9XMEufqPSys= +charm.land/lipgloss/v2 v2.0.1/go.mod h1:KjPle2Qd3YmvP1KL5OMHiHysGcNwq6u83MUjYkFvEkM= +github.com/BurntSushi/toml v1.6.0 h1:dRaEfpa2VI55EwlIW72hMRHdWouJeRF7TPYhI+AUQjk= +github.com/BurntSushi/toml v1.6.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= +github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= +github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= +github.com/charmbracelet/bubbles v1.0.0 h1:12J8/ak/uCZEMQ6KU7pcfwceyjLlWsDLAxB5fXonfvc= +github.com/charmbracelet/bubbles v1.0.0/go.mod h1:9d/Zd5GdnauMI5ivUIVisuEm3ave1XwXtD1ckyV6r3E= +github.com/charmbracelet/bubbletea v1.3.10 h1:otUDHWMMzQSB0Pkc87rm691KZ3SWa4KUlvF9nRvCICw= +github.com/charmbracelet/bubbletea v1.3.10/go.mod h1:ORQfo0fk8U+po9VaNvnV95UPWA1BitP1E0N6xJPlHr4= +github.com/charmbracelet/colorprofile v0.4.2 h1:BdSNuMjRbotnxHSfxy+PCSa4xAmz7szw70ktAtWRYrY= 
+github.com/charmbracelet/colorprofile v0.4.2/go.mod h1:0rTi81QpwDElInthtrQ6Ni7cG0sDtwAd4C4le060fT8= +github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY= +github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30= +github.com/charmbracelet/ultraviolet v0.0.0-20251205161215-1948445e3318 h1:OqDqxQZliC7C8adA7KjelW3OjtAxREfeHkNcd66wpeI= +github.com/charmbracelet/ultraviolet v0.0.0-20251205161215-1948445e3318/go.mod h1:Y6kE2GzHfkyQQVCSL9r2hwokSrIlHGzZG+71+wDYSZI= +github.com/charmbracelet/x/ansi v0.11.6 h1:GhV21SiDz/45W9AnV2R61xZMRri5NlLnl6CVF7ihZW8= +github.com/charmbracelet/x/ansi v0.11.6/go.mod h1:2JNYLgQUsyqaiLovhU2Rv/pb8r6ydXKS3NIttu3VGZQ= +github.com/charmbracelet/x/cellbuf v0.0.15 h1:ur3pZy0o6z/R7EylET877CBxaiE1Sp1GMxoFPAIztPI= +github.com/charmbracelet/x/cellbuf v0.0.15/go.mod h1:J1YVbR7MUuEGIFPCaaZ96KDl5NoS0DAWkskup+mOY+Q= +github.com/charmbracelet/x/term v0.2.2 h1:xVRT/S2ZcKdhhOuSP4t5cLi5o+JxklsoEObBSgfgZRk= +github.com/charmbracelet/x/term v0.2.2/go.mod h1:kF8CY5RddLWrsgVwpw4kAa6TESp6EB5y3uxGLeCqzAI= +github.com/charmbracelet/x/termios v0.1.1 h1:o3Q2bT8eqzGnGPOYheoYS8eEleT5ZVNYNy8JawjaNZY= +github.com/charmbracelet/x/termios v0.1.1/go.mod h1:rB7fnv1TgOPOyyKRJ9o+AsTU/vK5WHJ2ivHeut/Pcwo= +github.com/charmbracelet/x/windows v0.2.2 h1:IofanmuvaxnKHuV04sC0eBy/smG6kIKrWG2/jYn2GuM= +github.com/charmbracelet/x/windows v0.2.2/go.mod h1:/8XtdKZzedat74NQFn0NGlGL4soHB0YQZrETF96h75k= +github.com/clipperhouse/displaywidth v0.11.0 h1:lBc6kY44VFw+TDx4I8opi/EtL9m20WSEFgwIwO+UVM8= +github.com/clipperhouse/displaywidth v0.11.0/go.mod h1:bkrFNkf81G8HyVqmKGxsPufD3JhNl3dSqnGhOoSD/o0= +github.com/clipperhouse/uax29/v2 v2.7.0 h1:+gs4oBZ2gPfVrKPthwbMzWZDaAFPGYK72F0NJv2v7Vk= +github.com/clipperhouse/uax29/v2 v2.7.0/go.mod h1:EFJ2TJMRUaplDxHKj1qAEhCtQPW2tJSwu5BF98AuoVM= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM= +github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag= +github.com/lucasb-eyer/go-colorful v1.3.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4= +github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88= +github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw= +github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo= +github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA= +github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo= +github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc= +github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/richardlehane/mscfb v1.0.6 h1:eN3bvvZCp00bs7Zf52bxNwAx5lJDBK1tCuH19qq5aC8= +github.com/richardlehane/mscfb v1.0.6/go.mod h1:pe0+IUIc0AHh0+teNzBlJCtSyZdFOGgV4ZK9bsoV+Jo= +github.com/richardlehane/msoleps v1.0.6 
h1:9BvkpjvD+iUBalUY4esMwv6uBkfOip/Lzvd93jvR9gg= +github.com/richardlehane/msoleps v1.0.6/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/tiendc/go-deepcopy v1.7.2 h1:Ut2yYR7W9tWjTQitganoIue4UGxZwCcJy3orjrrIj44= +github.com/tiendc/go-deepcopy v1.7.2/go.mod h1:4bKjNC2r7boYOkD2IOuZpYjmlDdzjbpTRyCx+goBCJQ= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM= +github.com/xuri/efp v0.0.1 h1:fws5Rv3myXyYni8uwj2qKjVaRP30PdjeYe2Y6FDsCL8= +github.com/xuri/efp v0.0.1/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI= +github.com/xuri/excelize/v2 v2.10.1 h1:V62UlqopMqha3kOpnlHy2CcRVw1V8E63jFoWUmMzxN0= +github.com/xuri/excelize/v2 v2.10.1/go.mod h1:iG5tARpgaEeIhTqt3/fgXCGoBRt4hNXgCp3tfXKoOIc= +github.com/xuri/nfp v0.0.2-0.20250530014748-2ddeb826f9a9 h1:+C0TIdyyYmzadGaL/HBLbf3WdLgC29pgyhTjAT/0nuE= +github.com/xuri/nfp v0.0.2-0.20250530014748-2ddeb826f9a9/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ= +golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts= +golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos= +golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI= +golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo= 
+golang.org/x/image v0.36.0 h1:Iknbfm1afbgtwPTmHnS2gTM/6PPZfH+z2EFuOkSbqwc= +golang.org/x/image v0.36.0/go.mod h1:YsWD2TyyGKiIX1kZlu9QfKIsQ4nAAK9bdgdrIsE7xy4= +golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60= +golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM= +golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= +golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.42.0 h1:omrd2nAlyT5ESRdCLYdm3+fMfNFE/+Rf4bDIQImRJeo= +golang.org/x/sys v0.42.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw= +golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk= +golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/cli/internal/converter/converter.go b/cli/internal/converter/converter.go new file mode 100644 index 0000000..c3daabb --- /dev/null +++ b/cli/internal/converter/converter.go @@ -0,0 +1,76 @@ +package converter + +import ( + "fmt" + "path/filepath" + "strings" + + "github.com/noauf/transmute-cli/internal/detect" +) + +// Result holds the outcome of a single conversion. +type Result struct { + InputPath string + OutputPath string + Err error +} + +// Convert is the main entry point. It routes to the correct converter based on +// the source file's category. 
+func Convert(inputPath, targetFormat, outputDir string) Result { + ext := strings.TrimPrefix(filepath.Ext(inputPath), ".") + ext = strings.ToLower(ext) + + cat := detect.DetectCategory(ext) + + // Determine output path + base := strings.TrimSuffix(filepath.Base(inputPath), filepath.Ext(inputPath)) + dir := filepath.Dir(inputPath) + if outputDir != "" { + dir = outputDir + } + outPath := filepath.Join(dir, base+"."+targetFormat) + + // Avoid overwriting — append _converted if output == input + if outPath == inputPath { + outPath = filepath.Join(dir, base+"_converted."+targetFormat) + } + + var err error + switch cat { + case detect.CategoryImage: + err = convertImage(inputPath, outPath, targetFormat) + case detect.CategoryDocument: + err = convertDocument(inputPath, outPath, ext, targetFormat) + case detect.CategoryAudio, detect.CategoryVideo: + err = convertMedia(inputPath, outPath, targetFormat) + case detect.CategoryData: + err = convertData(inputPath, outPath, ext, targetFormat) + case detect.CategorySpreadsheet: + err = convertSpreadsheet(inputPath, outPath, ext, targetFormat) + case detect.CategoryFont: + err = convertFont(inputPath, outPath, ext, targetFormat) + default: + err = fmt.Errorf("unsupported file type: %s", ext) + } + + return Result{ + InputPath: inputPath, + OutputPath: outPath, + Err: err, + } +} + +// OutputPath computes what the output path would be without performing conversion. 
+func OutputPath(inputPath, targetFormat, outputDir string) string { + base := strings.TrimSuffix(filepath.Base(inputPath), filepath.Ext(inputPath)) + dir := filepath.Dir(inputPath) + if outputDir != "" { + dir = outputDir + } + outPath := filepath.Join(dir, base+"."+targetFormat) + if outPath == inputPath { + outPath = filepath.Join(dir, base+"_converted."+targetFormat) + } + return outPath +} diff --git a/cli/internal/converter/data.go b/cli/internal/converter/data.go new file mode 100644 index 0000000..adb81c2 --- /dev/null +++ b/cli/internal/converter/data.go @@ -0,0 +1,452 @@ +package converter + +import ( + "encoding/csv" + "encoding/json" + "encoding/xml" + "fmt" + "os" + "strings" + + "github.com/BurntSushi/toml" + "gopkg.in/yaml.v3" +) + +func convertData(inputPath, outputPath, sourceExt, targetFormat string) error { + // Strategy: parse input into a generic Go structure, then serialize to target format. + // For tabular data (CSV, TSV) we use [][]string -> []map[string]interface{} (first row = headers). + // For structured data (JSON, YAML, TOML, XML) we use interface{}. 
+ + raw, err := os.ReadFile(inputPath) + if err != nil { + return fmt.Errorf("reading input: %w", err) + } + + // Determine if source is tabular or structured + switch sourceExt { + case "csv", "tsv": + return convertTabularData(raw, outputPath, sourceExt, targetFormat) + case "json": + var data interface{} + if err := json.Unmarshal(raw, &data); err != nil { + return fmt.Errorf("parsing JSON: %w", err) + } + return writeData(data, outputPath, targetFormat) + case "ndjson", "jsonl": + return convertNDJSON(raw, outputPath, targetFormat) + case "yaml", "yml": + var data interface{} + if err := yaml.Unmarshal(raw, &data); err != nil { + return fmt.Errorf("parsing YAML: %w", err) + } + return writeData(data, outputPath, targetFormat) + case "toml": + var data interface{} + if err := toml.Unmarshal(raw, &data); err != nil { + return fmt.Errorf("parsing TOML: %w", err) + } + return writeData(data, outputPath, targetFormat) + case "xml": + return convertXML(raw, outputPath, targetFormat) + case "ini", "env", "properties": + data := parseKeyValue(string(raw), sourceExt) + return writeData(data, outputPath, targetFormat) + case "sql": + return convertSQL(raw, outputPath, targetFormat) + default: + return fmt.Errorf("unsupported data source format: %s", sourceExt) + } +} + +func convertTabularData(raw []byte, outputPath, sourceExt, targetFormat string) error { + delimiter := ',' + if sourceExt == "tsv" { + delimiter = '\t' + } + + reader := csv.NewReader(strings.NewReader(string(raw))) + reader.Comma = delimiter + reader.LazyQuotes = true + + records, err := reader.ReadAll() + if err != nil { + return fmt.Errorf("parsing %s: %w", sourceExt, err) + } + + if len(records) == 0 { + return fmt.Errorf("empty %s file", sourceExt) + } + + // Convert to []map[string]interface{} using first row as headers + headers := records[0] + var rows []map[string]interface{} + for _, record := range records[1:] { + row := make(map[string]interface{}) + for i, header := range headers { + if i < 
len(record) { + row[header] = record[i] + } + } + rows = append(rows, row) + } + + switch targetFormat { + case "json": + return writeJSON(rows, outputPath) + case "yaml", "yml": + return writeYAML(rows, outputPath) + case "toml": + wrapper := map[string]interface{}{"data": rows} + return writeTOML(wrapper, outputPath) + case "xml": + return writeXMLFromRows(rows, outputPath) + case "tsv": + return writeDelimited(headers, records[1:], outputPath, '\t') + case "csv": + return writeDelimited(headers, records[1:], outputPath, ',') + case "html": + return writeHTMLTable(headers, records[1:], outputPath) + case "sql": + return writeSQLInserts(headers, records[1:], outputPath, "data") + case "ndjson": + return writeNDJSON(rows, outputPath) + default: + return fmt.Errorf("unsupported target format for tabular data: %s", targetFormat) + } +} + +func convertNDJSON(raw []byte, outputPath, targetFormat string) error { + lines := strings.Split(strings.TrimSpace(string(raw)), "\n") + var items []interface{} + for _, line := range lines { + line = strings.TrimSpace(line) + if line == "" { + continue + } + var item interface{} + if err := json.Unmarshal([]byte(line), &item); err != nil { + continue // skip invalid lines + } + items = append(items, item) + } + return writeData(items, outputPath, targetFormat) +} + +func convertXML(raw []byte, outputPath, targetFormat string) error { + // Simple XML -> generic map conversion + var data interface{} + if err := xml.Unmarshal(raw, &data); err != nil { + // XML to map is tricky — treat as string content for simple cases + // or use a simple parser + data = map[string]interface{}{"xml_content": string(raw)} + } + return writeData(data, outputPath, targetFormat) +} + +func convertSQL(raw []byte, outputPath, targetFormat string) error { + // Very basic: extract INSERT statement values + content := string(raw) + lines := strings.Split(content, "\n") + var records []map[string]interface{} + + for _, line := range lines { + line = 
strings.TrimSpace(line) + upper := strings.ToUpper(line) + if strings.HasPrefix(upper, "INSERT") { + // Very basic INSERT parser — extract values + valIdx := strings.Index(upper, "VALUES") + if valIdx == -1 { + continue + } + valPart := line[valIdx+6:] + valPart = strings.Trim(valPart, " ;()") + values := strings.Split(valPart, ",") + row := make(map[string]interface{}) + for i, v := range values { + v = strings.TrimSpace(v) + v = strings.Trim(v, "'\"") + row[fmt.Sprintf("col%d", i+1)] = v + } + records = append(records, row) + } + } + + if targetFormat == "json" { + return writeJSON(records, outputPath) + } + if targetFormat == "csv" { + // Flatten to CSV + if len(records) == 0 { + return os.WriteFile(outputPath, []byte(""), 0o644) + } + var headers []string + for k := range records[0] { + headers = append(headers, k) + } + var csvRecords [][]string + for _, r := range records { + var row []string + for _, h := range headers { + row = append(row, fmt.Sprintf("%v", r[h])) + } + csvRecords = append(csvRecords, row) + } + return writeDelimited(headers, csvRecords, outputPath, ',') + } + return fmt.Errorf("unsupported target format for SQL: %s", targetFormat) +} + +func parseKeyValue(content, format string) map[string]interface{} { + result := make(map[string]interface{}) + lines := strings.Split(content, "\n") + + for _, line := range lines { + line = strings.TrimSpace(line) + if line == "" || strings.HasPrefix(line, "#") || strings.HasPrefix(line, ";") { + continue + } + // Handle different separators + sep := "=" + if format == "properties" && strings.Contains(line, ":") && !strings.Contains(line, "=") { + sep = ":" + } + parts := strings.SplitN(line, sep, 2) + if len(parts) == 2 { + key := strings.TrimSpace(parts[0]) + value := strings.TrimSpace(parts[1]) + // Remove quotes from .env values + if format == "env" { + value = strings.Trim(value, "\"'") + } + result[key] = value + } + } + return result +} + +// ─── Writers 
───────────────────────────────────────────────── + +func writeData(data interface{}, outputPath, targetFormat string) error { + switch targetFormat { + case "json": + return writeJSON(data, outputPath) + case "yaml", "yml": + return writeYAML(data, outputPath) + case "toml": + return writeTOML(data, outputPath) + case "csv": + return writeDataAsCSV(data, outputPath) + case "tsv": + return writeDataAsTSV(data, outputPath) + case "xml": + return writeXMLGeneric(data, outputPath) + case "html": + return writeDataAsHTML(data, outputPath) + case "ndjson": + return writeDataAsNDJSON(data, outputPath) + default: + return fmt.Errorf("unsupported target format: %s", targetFormat) + } +} + +func writeJSON(data interface{}, outputPath string) error { + b, err := json.MarshalIndent(data, "", " ") + if err != nil { + return err + } + return os.WriteFile(outputPath, b, 0o644) +} + +func writeYAML(data interface{}, outputPath string) error { + b, err := yaml.Marshal(data) + if err != nil { + return err + } + return os.WriteFile(outputPath, b, 0o644) +} + +func writeTOML(data interface{}, outputPath string) error { + f, err := os.Create(outputPath) + if err != nil { + return err + } + defer f.Close() + enc := toml.NewEncoder(f) + return enc.Encode(data) +} + +func writeDelimited(headers []string, rows [][]string, outputPath string, sep rune) error { + f, err := os.Create(outputPath) + if err != nil { + return err + } + defer f.Close() + + w := csv.NewWriter(f) + w.Comma = sep + if err := w.Write(headers); err != nil { + return err + } + for _, row := range rows { + if err := w.Write(row); err != nil { + return err + } + } + w.Flush() + return w.Error() +} + +func writeHTMLTable(headers []string, rows [][]string, outputPath string) error { + var sb strings.Builder + sb.WriteString("\n\n") + for _, h := range headers { + sb.WriteString("") + } + sb.WriteString("\n\n\n") + for _, row := range rows { + sb.WriteString("") + for _, cell := range row { + sb.WriteString("") + } + 
sb.WriteString("\n") + } + sb.WriteString("\n
" + h + "
" + cell + "
") + return os.WriteFile(outputPath, []byte(sb.String()), 0o644) +} + +func writeSQLInserts(headers []string, rows [][]string, outputPath, tableName string) error { + var sb strings.Builder + cols := strings.Join(headers, ", ") + for _, row := range rows { + var vals []string + for _, v := range row { + vals = append(vals, "'"+strings.ReplaceAll(v, "'", "''")+"'") + } + sb.WriteString(fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s);\n", + tableName, cols, strings.Join(vals, ", "))) + } + return os.WriteFile(outputPath, []byte(sb.String()), 0o644) +} + +func writeNDJSON(rows []map[string]interface{}, outputPath string) error { + var sb strings.Builder + for _, row := range rows { + b, err := json.Marshal(row) + if err != nil { + return err + } + sb.Write(b) + sb.WriteByte('\n') + } + return os.WriteFile(outputPath, []byte(sb.String()), 0o644) +} + +func writeXMLFromRows(rows []map[string]interface{}, outputPath string) error { + var sb strings.Builder + sb.WriteString("\n\n") + for _, row := range rows { + sb.WriteString(" \n") + for k, v := range row { + sb.WriteString(fmt.Sprintf(" <%s>%v\n", k, v, k)) + } + sb.WriteString(" \n") + } + sb.WriteString("") + return os.WriteFile(outputPath, []byte(sb.String()), 0o644) +} + +func writeXMLGeneric(data interface{}, outputPath string) error { + b, err := json.MarshalIndent(data, "", " ") + if err != nil { + return err + } + // Wrap JSON in XML as a simple approach + content := "\n\n" + string(b) + "\n" + return os.WriteFile(outputPath, []byte(content), 0o644) +} + +func writeDataAsCSV(data interface{}, outputPath string) error { + rows := toRowsOfMaps(data) + if len(rows) == 0 { + return os.WriteFile(outputPath, []byte(""), 0o644) + } + headers := extractHeaders(rows[0]) + var records [][]string + for _, row := range rows { + var record []string + for _, h := range headers { + record = append(record, fmt.Sprintf("%v", row[h])) + } + records = append(records, record) + } + return writeDelimited(headers, records, outputPath, 
',') +} + +func writeDataAsTSV(data interface{}, outputPath string) error { + rows := toRowsOfMaps(data) + if len(rows) == 0 { + return os.WriteFile(outputPath, []byte(""), 0o644) + } + headers := extractHeaders(rows[0]) + var records [][]string + for _, row := range rows { + var record []string + for _, h := range headers { + record = append(record, fmt.Sprintf("%v", row[h])) + } + records = append(records, record) + } + return writeDelimited(headers, records, outputPath, '\t') +} + +func writeDataAsHTML(data interface{}, outputPath string) error { + b, err := json.MarshalIndent(data, "", " ") + if err != nil { + return err + } + html := "
" + string(b) + "
" + return os.WriteFile(outputPath, []byte(html), 0o644) +} + +func writeDataAsNDJSON(data interface{}, outputPath string) error { + rows := toRowsOfMaps(data) + var sb strings.Builder + for _, row := range rows { + b, err := json.Marshal(row) + if err != nil { + return err + } + sb.Write(b) + sb.WriteByte('\n') + } + return os.WriteFile(outputPath, []byte(sb.String()), 0o644) +} + +// ─── Helpers ───────────────────────────────────────────────── + +func toRowsOfMaps(data interface{}) []map[string]interface{} { + switch v := data.(type) { + case []interface{}: + var rows []map[string]interface{} + for _, item := range v { + if m, ok := item.(map[string]interface{}); ok { + rows = append(rows, m) + } + } + return rows + case []map[string]interface{}: + return v + case map[string]interface{}: + return []map[string]interface{}{v} + default: + return nil + } +} + +func extractHeaders(row map[string]interface{}) []string { + var headers []string + for k := range row { + headers = append(headers, k) + } + return headers +} diff --git a/cli/internal/converter/document.go b/cli/internal/converter/document.go new file mode 100644 index 0000000..b75a8ba --- /dev/null +++ b/cli/internal/converter/document.go @@ -0,0 +1,326 @@ +package converter + +import ( + "archive/zip" + "fmt" + "io" + "os" + "os/exec" + "strings" + + "github.com/russross/blackfriday/v2" +) + +func convertDocument(inputPath, outputPath, sourceExt, targetFormat string) error { + raw, err := os.ReadFile(inputPath) + if err != nil { + return fmt.Errorf("reading document: %w", err) + } + content := string(raw) + + switch sourceExt { + case "txt": + return convertFromTxt(content, outputPath, targetFormat) + case "md": + return convertFromMarkdown(content, outputPath, targetFormat) + case "html", "htm": + return convertFromHTML(content, outputPath, targetFormat) + case "rtf": + return convertFromRTF(content, outputPath, targetFormat) + case "docx": + return convertDocx(inputPath, outputPath, targetFormat) + case 
"pdf": + return convertPdf(inputPath, outputPath, targetFormat) + default: + return fmt.Errorf("unsupported document source: %s", sourceExt) + } +} + +// ─── TXT conversions ───────────────────────────────────────── + +func convertFromTxt(content, outputPath, target string) error { + switch target { + case "html": + html := "
" + escapeHTML(content) + "
" + return os.WriteFile(outputPath, []byte(html), 0o644) + case "md": + return os.WriteFile(outputPath, []byte(content), 0o644) + case "pdf": + return textToPDF(content, outputPath) + default: + return fmt.Errorf("unsupported target for txt: %s", target) + } +} + +// ─── Markdown conversions ──────────────────────────────────── + +func convertFromMarkdown(content, outputPath, target string) error { + switch target { + case "html": + html := blackfriday.Run([]byte(content)) + wrapped := "" + string(html) + "" + return os.WriteFile(outputPath, []byte(wrapped), 0o644) + case "txt": + text := stripMarkdown(content) + return os.WriteFile(outputPath, []byte(text), 0o644) + case "pdf": + html := string(blackfriday.Run([]byte(content))) + return htmlToPDF(html, outputPath) + default: + return fmt.Errorf("unsupported target for md: %s", target) + } +} + +// ─── HTML conversions ──────────────────────────────────────── + +func convertFromHTML(content, outputPath, target string) error { + switch target { + case "txt": + text := stripHTMLTags(content) + return os.WriteFile(outputPath, []byte(text), 0o644) + case "md": + md := htmlToMarkdown(content) + return os.WriteFile(outputPath, []byte(md), 0o644) + case "pdf": + return htmlToPDF(content, outputPath) + default: + return fmt.Errorf("unsupported target for html: %s", target) + } +} + +// ─── RTF conversions ───────────────────────────────────────── + +func convertFromRTF(content, outputPath, target string) error { + text := stripRTF(content) + switch target { + case "txt": + return os.WriteFile(outputPath, []byte(text), 0o644) + case "html": + html := "
" + escapeHTML(text) + "
" + return os.WriteFile(outputPath, []byte(html), 0o644) + case "md": + return os.WriteFile(outputPath, []byte(text), 0o644) + default: + return fmt.Errorf("unsupported target for rtf: %s", target) + } +} + +// ─── DOCX conversions ──────────────────────────────────────── + +func convertDocx(inputPath, outputPath, target string) error { + text, err := extractDocxText(inputPath) + if err != nil { + return fmt.Errorf("extracting DOCX text: %w", err) + } + + switch target { + case "txt": + return os.WriteFile(outputPath, []byte(text), 0o644) + case "html": + html := "
" + escapeHTML(text) + "
" + return os.WriteFile(outputPath, []byte(html), 0o644) + case "md": + return os.WriteFile(outputPath, []byte(text), 0o644) + case "pdf": + return textToPDF(text, outputPath) + default: + return fmt.Errorf("unsupported target for docx: %s", target) + } +} + +// ─── PDF conversions ───────────────────────────────────────── + +func convertPdf(inputPath, outputPath, target string) error { + text, err := extractPDFText(inputPath) + if err != nil { + return fmt.Errorf("extracting PDF text: %w", err) + } + + switch target { + case "txt": + return os.WriteFile(outputPath, []byte(text), 0o644) + case "html": + html := "
" + escapeHTML(text) + "
// escapeHTML replaces the HTML metacharacters &, < and > with their
// character-entity references so arbitrary text can be embedded safely
// in generated HTML.
//
// As previously written, each ReplaceAll mapped a character to itself
// (the entity text appears to have been lost), so the function returned
// its input unchanged and provided no escaping at all. A single-pass
// strings.Replacer restores the intended behavior and cannot
// double-escape the entities it produces.
func escapeHTML(s string) string {
	replacer := strings.NewReplacer(
		"&", "&amp;",
		"<", "&lt;",
		">", "&gt;",
	)
	return replacer.Replace(s)
}
", "\n") + md = strings.ReplaceAll(md, "
", "\n") + md = strings.ReplaceAll(md, "
", "\n") + md = strings.ReplaceAll(md, "

", "\n") + md = strings.ReplaceAll(md, "

", "\n") + md = strings.ReplaceAll(md, "", "**") + md = strings.ReplaceAll(md, "", "**") + md = strings.ReplaceAll(md, "", "*") + md = strings.ReplaceAll(md, "", "*") + md = strings.ReplaceAll(md, "

", "# ") + md = strings.ReplaceAll(md, "

", "\n") + md = strings.ReplaceAll(md, "

", "## ") + md = strings.ReplaceAll(md, "

", "\n") + md = strings.ReplaceAll(md, "

", "### ") + md = strings.ReplaceAll(md, "

// extractPDFText extracts plain text from a PDF by shelling out to the
// pdftotext utility (poppler-utils); there is no pure-Go fallback.
// The trailing "-" argument makes pdftotext write the text to stdout.
//
// Fix: the previous implementation used CombinedOutput, which
// interleaved pdftotext's stderr warnings (e.g. font or syntax
// complaints) into the extracted text. Output() keeps stdout — the
// actual text — clean; stderr is surfaced only in the error message.
func extractPDFText(path string) (string, error) {
	pdftotextPath, err := exec.LookPath("pdftotext")
	if err != nil {
		return "", fmt.Errorf("PDF text extraction requires 'pdftotext' — install poppler-utils")
	}
	out, err := exec.Command(pdftotextPath, path, "-").Output()
	if err != nil {
		// Output() stores stderr on the ExitError when the process fails.
		var detail string
		if exitErr, ok := err.(*exec.ExitError); ok {
			detail = string(exitErr.Stderr)
		}
		return "", fmt.Errorf("pdftotext failed: %w\n%s", err, detail)
	}
	return string(out), nil
}
+func textToPDF(text, outputPath string) error { + lines := strings.Split(text, "\n") + var content strings.Builder + + content.WriteString("%PDF-1.4\n") + content.WriteString("1 0 obj\n<< /Type /Catalog /Pages 2 0 R >>\nendobj\n") + content.WriteString("2 0 obj\n<< /Type /Pages /Kids [3 0 R] /Count 1 >>\nendobj\n") + + var stream strings.Builder + stream.WriteString("BT\n/F1 10 Tf\n") + y := 780.0 + for _, line := range lines { + if y < 40 { + break + } + safe := strings.ReplaceAll(line, "\\", "\\\\") + safe = strings.ReplaceAll(safe, "(", "\\(") + safe = strings.ReplaceAll(safe, ")", "\\)") + stream.WriteString(fmt.Sprintf("1 0 0 1 40 %.0f Tm\n(%s) Tj\n", y, safe)) + y -= 14 + } + stream.WriteString("ET\n") + streamBytes := stream.String() + + content.WriteString(fmt.Sprintf("3 0 obj\n<< /Type /Page /Parent 2 0 R /MediaBox [0 0 612 792] /Contents 4 0 R /Resources << /Font << /F1 5 0 R >> >> >>\nendobj\n")) + content.WriteString(fmt.Sprintf("4 0 obj\n<< /Length %d >>\nstream\n%sendstream\nendobj\n", len(streamBytes), streamBytes)) + content.WriteString("5 0 obj\n<< /Type /Font /Subtype /Type1 /BaseFont /Helvetica >>\nendobj\n") + content.WriteString("xref\n0 6\n") + content.WriteString("trailer\n<< /Size 6 /Root 1 0 R >>\nstartxref\n0\n%%EOF\n") + + return os.WriteFile(outputPath, []byte(content.String()), 0o644) +} + +func htmlToPDF(html, outputPath string) error { + text := stripHTMLTags(html) + return textToPDF(text, outputPath) +} diff --git a/cli/internal/converter/ebook.go b/cli/internal/converter/ebook.go new file mode 100644 index 0000000..2223b87 --- /dev/null +++ b/cli/internal/converter/ebook.go @@ -0,0 +1,365 @@ +package converter + +import ( + "archive/zip" + "fmt" + "io" + "os" + "path/filepath" + "regexp" + "sort" + "strings" + "time" + + "github.com/russross/blackfriday/v2" +) + +// convertEbook handles epub ↔ txt/html/md/pdf conversions. 
+func convertEbook(inputPath, outputPath, sourceExt, targetFormat string) error { + if sourceExt == "epub" { + return convertFromEpub(inputPath, outputPath, targetFormat) + } + // txt/html/md → epub + return convertToEpub(inputPath, outputPath, sourceExt) +} + +// ─── EPUB → other formats ──────────────────────────────────── + +func convertFromEpub(inputPath, outputPath, targetFormat string) error { + title, htmlChapters, err := extractEpubContent(inputPath) + if err != nil { + return fmt.Errorf("reading epub: %w", err) + } + + fullHTML := strings.Join(htmlChapters, "\n
\n") + + switch targetFormat { + case "txt": + text := stripHTMLTags(fullHTML) + return os.WriteFile(outputPath, []byte(text), 0o644) + case "html": + styled := fmt.Sprintf(` + +%s + +

%s

%s`, escapeHTML(title), escapeHTML(title), fullHTML) + return os.WriteFile(outputPath, []byte(styled), 0o644) + case "md": + md := "# " + title + "\n\n" + ebookHTMLToMarkdown(fullHTML) + return os.WriteFile(outputPath, []byte(md), 0o644) + case "pdf": + text := stripHTMLTags(fullHTML) + return textToPDF(text, outputPath) + default: + return fmt.Errorf("unsupported target for epub: %s", targetFormat) + } +} + +// ─── Other formats → EPUB ──────────────────────────────────── + +func convertToEpub(inputPath, outputPath, sourceExt string) error { + raw, err := os.ReadFile(inputPath) + if err != nil { + return fmt.Errorf("reading input: %w", err) + } + content := string(raw) + title := strings.TrimSuffix(filepath.Base(inputPath), filepath.Ext(inputPath)) + + var htmlContent string + switch sourceExt { + case "txt": + // Split into paragraphs on double newlines + paragraphs := strings.Split(content, "\n\n") + var sb strings.Builder + for _, p := range paragraphs { + p = strings.TrimSpace(p) + if p != "" { + sb.WriteString("

" + escapeHTML(p) + "

\n") + } + } + htmlContent = sb.String() + case "html", "htm": + // Extract body if full document + bodyRe := regexp.MustCompile(`(?is)]*>(.*)`) + if m := bodyRe.FindStringSubmatch(content); m != nil { + htmlContent = m[1] + } else { + htmlContent = content + } + case "md": + htmlBytes := blackfriday.Run([]byte(content)) + htmlContent = string(htmlBytes) + default: + return fmt.Errorf("unsupported source for epub creation: %s", sourceExt) + } + + return writeEpubFile(outputPath, title, htmlContent) +} + +// ─── EPUB reader ───────────────────────────────────────────── + +func extractEpubContent(path string) (string, []string, error) { + r, err := zip.OpenReader(path) + if err != nil { + return "", nil, err + } + defer r.Close() + + title := "Untitled" + var htmlChapters []string + + // Find OPF file via container.xml + var opfPath string + for _, f := range r.File { + if f.Name == "META-INF/container.xml" { + data, err := readZipFile(f) + if err != nil { + break + } + re := regexp.MustCompile(`full-path="([^"]+)"`) + if m := re.FindStringSubmatch(string(data)); m != nil { + opfPath = m[1] + } + break + } + } + + if opfPath != "" { + // Read OPF + opfContent := "" + for _, f := range r.File { + if f.Name == opfPath { + data, err := readZipFile(f) + if err == nil { + opfContent = string(data) + } + break + } + } + + if opfContent != "" { + // Extract title + titleRe := regexp.MustCompile(`]*>([^<]+)`) + if m := titleRe.FindStringSubmatch(opfContent); m != nil { + title = m[1] + } + + // Build manifest map: id -> href + manifest := make(map[string]string) + itemRe := regexp.MustCompile(`]*id="([^"]*)"[^>]*href="([^"]*)"[^>]*`) + for _, m := range itemRe.FindAllStringSubmatch(opfContent, -1) { + manifest[m[1]] = m[2] + } + // Also handle reversed attr order + itemRe2 := regexp.MustCompile(`]*href="([^"]*)"[^>]*id="([^"]*)"[^>]*`) + for _, m := range itemRe2.FindAllStringSubmatch(opfContent, -1) { + manifest[m[2]] = m[1] + } + + // Get spine order + var spineIDs 
[]string + spineRe := regexp.MustCompile(`]*idref="([^"]*)"[^>]*`) + for _, m := range spineRe.FindAllStringSubmatch(opfContent, -1) { + spineIDs = append(spineIDs, m[1]) + } + + // Resolve relative to OPF dir + opfDir := "" + if idx := strings.LastIndex(opfPath, "/"); idx >= 0 { + opfDir = opfPath[:idx+1] + } + + // Build a map of zip files for quick lookup + zipFiles := make(map[string]*zip.File) + for _, f := range r.File { + zipFiles[f.Name] = f + } + + for _, id := range spineIDs { + href, ok := manifest[id] + if !ok { + continue + } + fullPath := opfDir + href + zf, ok := zipFiles[fullPath] + if !ok { + continue + } + data, err := readZipFile(zf) + if err != nil { + continue + } + // Extract body content + bodyRe := regexp.MustCompile(`(?is)]*>(.*)`) + if m := bodyRe.FindStringSubmatch(string(data)); m != nil { + htmlChapters = append(htmlChapters, m[1]) + } else { + htmlChapters = append(htmlChapters, string(data)) + } + } + } + } + + // Fallback: scan for any xhtml/html files + if len(htmlChapters) == 0 { + var htmlFiles []*zip.File + htmlRe := regexp.MustCompile(`(?i)\.(x?html?)$`) + for _, f := range r.File { + if htmlRe.MatchString(f.Name) { + htmlFiles = append(htmlFiles, f) + } + } + sort.Slice(htmlFiles, func(i, j int) bool { + return htmlFiles[i].Name < htmlFiles[j].Name + }) + for _, f := range htmlFiles { + data, err := readZipFile(f) + if err != nil { + continue + } + bodyRe := regexp.MustCompile(`(?is)]*>(.*)`) + if m := bodyRe.FindStringSubmatch(string(data)); m != nil { + htmlChapters = append(htmlChapters, m[1]) + } else { + htmlChapters = append(htmlChapters, string(data)) + } + } + } + + return title, htmlChapters, nil +} + +func readZipFile(f *zip.File) ([]byte, error) { + rc, err := f.Open() + if err != nil { + return nil, err + } + defer rc.Close() + return io.ReadAll(rc) +} + +// ─── EPUB writer ───────────────────────────────────────────── + +func writeEpubFile(outputPath, title, htmlContent string) error { + f, err := 
os.Create(outputPath) + if err != nil { + return err + } + defer f.Close() + + w := zip.NewWriter(f) + defer w.Close() + + uid := fmt.Sprintf("transmute-%d", time.Now().UnixNano()) + modified := time.Now().UTC().Format("2006-01-02T15:04:05Z") + + // mimetype (must be stored, not compressed) + mimeHeader := &zip.FileHeader{ + Name: "mimetype", + Method: zip.Store, + } + mw, err := w.CreateHeader(mimeHeader) + if err != nil { + return err + } + mw.Write([]byte("application/epub+zip")) + + // META-INF/container.xml + cw, _ := w.Create("META-INF/container.xml") + cw.Write([]byte(` + + + + +`)) + + // OEBPS/content.opf + ow, _ := w.Create("OEBPS/content.opf") + ow.Write([]byte(fmt.Sprintf(` + + + %s + %s + en + %s + + + + + + + + +`, uid, escapeHTML(title), modified))) + + // OEBPS/nav.xhtml + nw, _ := w.Create("OEBPS/nav.xhtml") + nw.Write([]byte(fmt.Sprintf(` + + +Navigation + + + +`, escapeHTML(title)))) + + // OEBPS/chapter1.xhtml + chw, _ := w.Create("OEBPS/chapter1.xhtml") + chw.Write([]byte(fmt.Sprintf(` + + +%s + + + +%s + +`, escapeHTML(title), htmlContent))) + + return nil +} + +// ─── Helpers ───────────────────────────────────────────────── + +func ebookHTMLToMarkdown(html string) string { + md := html + // Headers + for i := 6; i >= 1; i-- { + prefix := strings.Repeat("#", i) + " " + openTag := fmt.Sprintf("", i) + closeTag := fmt.Sprintf("", i) + md = strings.ReplaceAll(md, openTag, prefix) + md = strings.ReplaceAll(md, closeTag, "\n\n") + // Also case-insensitive with attributes + re := regexp.MustCompile(fmt.Sprintf(`(?i)]*>`, i)) + md = re.ReplaceAllString(md, prefix) + re2 := regexp.MustCompile(fmt.Sprintf(`(?i)`, i)) + md = re2.ReplaceAllString(md, "\n\n") + } + md = strings.ReplaceAll(md, "", "**") + md = strings.ReplaceAll(md, "", "**") + md = strings.ReplaceAll(md, "", "**") + md = strings.ReplaceAll(md, "", "**") + md = strings.ReplaceAll(md, "", "*") + md = strings.ReplaceAll(md, "", "*") + md = strings.ReplaceAll(md, "", "*") + md = 
strings.ReplaceAll(md, "", "*") + md = strings.ReplaceAll(md, "
", "\n") + md = strings.ReplaceAll(md, "
", "\n") + md = strings.ReplaceAll(md, "
", "\n") + md = strings.ReplaceAll(md, "

", "\n") + md = strings.ReplaceAll(md, "

", "\n") + md = strings.ReplaceAll(md, "
", "\n---\n") + md = strings.ReplaceAll(md, "
", "\n---\n") + md = strings.ReplaceAll(md, "
", "\n---\n") + // Strip remaining tags + md = stripHTMLTags(md) + return strings.TrimSpace(md) +} diff --git a/cli/internal/converter/font.go b/cli/internal/converter/font.go new file mode 100644 index 0000000..8d54c39 --- /dev/null +++ b/cli/internal/converter/font.go @@ -0,0 +1,91 @@ +package converter + +import ( + "fmt" + "os" + "os/exec" + "strings" +) + +// convertFont handles font format conversions. +// Go doesn't have native font conversion libraries, so we use fonttools (Python) or ffmpeg +// as external dependencies. For a CLI tool this is acceptable — we check for fonttools first. +func convertFont(inputPath, outputPath, sourceExt, targetFormat string) error { + // Font conversion is complex — the most reliable approach is using fonttools/pyftsubset. + // We'll try a simple copy-based approach for woff/woff2 ↔ ttf/otf since the underlying + // data is similar, but for proper conversion we'd need external tools. + + // For now, provide a clear error explaining what's needed. + // In the future we could bundle a Go-native font converter or auto-install fonttools. 
+ + switch { + case (sourceExt == "ttf" || sourceExt == "otf") && (targetFormat == "woff" || targetFormat == "woff2"): + return fontConvertViaFFmpeg(inputPath, outputPath) + case (sourceExt == "woff" || sourceExt == "woff2") && (targetFormat == "ttf" || targetFormat == "otf"): + return fontConvertViaFFmpeg(inputPath, outputPath) + case sourceExt == "ttf" && targetFormat == "otf": + return fontConvertViaFFmpeg(inputPath, outputPath) + case sourceExt == "otf" && targetFormat == "ttf": + return fontConvertViaFFmpeg(inputPath, outputPath) + case sourceExt == "woff" && targetFormat == "woff2": + return fontConvertViaFFmpeg(inputPath, outputPath) + case sourceExt == "woff2" && targetFormat == "woff": + return fontConvertViaFFmpeg(inputPath, outputPath) + default: + return fmt.Errorf("font conversion from %s to %s is not supported", sourceExt, targetFormat) + } +} + +// fontConvertViaFFmpeg attempts font conversion. FFmpeg doesn't actually handle fonts, +// so we check for fonttools (Python pyftsubset/fonttools CLI). +func fontConvertViaFFmpeg(inputPath, outputPath string) error { + // Check if fonttools is available + // fonttools provides `pyftsubset` and `fonttools` CLI + // For simple conversions: fonttools ttLib can convert between formats + + // Write a small Python script to do the conversion + script := fmt.Sprintf(` +import sys +try: + from fontTools.ttLib import TTFont + font = TTFont("%s") + font.save("%s") + print("OK") +except ImportError: + print("ERROR: fonttools not installed. 
Run: pip install fonttools brotli") + sys.exit(1) +except Exception as e: + print(f"ERROR: {e}") + sys.exit(1) +`, inputPath, outputPath) + + tmpScript, err := os.CreateTemp("", "transmute-font-*.py") + if err != nil { + return fmt.Errorf("creating temp script: %w", err) + } + defer os.Remove(tmpScript.Name()) + + if _, err := tmpScript.WriteString(script); err != nil { + tmpScript.Close() + return err + } + tmpScript.Close() + + // Try python3 first, then python + for _, pyCmd := range []string{"python3", "python"} { + output, err := runPython(pyCmd, tmpScript.Name()) + if err == nil { + if output == "OK" || len(output) > 0 { + return nil + } + } + } + + return fmt.Errorf("font conversion requires Python + fonttools. Install with: pip install fonttools brotli") +} + +func runPython(python, scriptPath string) (string, error) { + cmd := exec.Command(python, scriptPath) + out, err := cmd.CombinedOutput() + return strings.TrimSpace(string(out)), err +} diff --git a/cli/internal/converter/image.go b/cli/internal/converter/image.go new file mode 100644 index 0000000..f4c0a58 --- /dev/null +++ b/cli/internal/converter/image.go @@ -0,0 +1,106 @@ +package converter + +import ( + "fmt" + "image" + "image/gif" + "image/jpeg" + "image/png" + "os" + "strings" + + "golang.org/x/image/bmp" + "golang.org/x/image/tiff" + "golang.org/x/image/webp" +) + +func convertImage(inputPath, outputPath, targetFormat string) error { + f, err := os.Open(inputPath) + if err != nil { + return fmt.Errorf("opening image: %w", err) + } + defer f.Close() + + // Decode input — Go's image package auto-registers png, jpeg, gif via import + // We also need x/image decoders for bmp, tiff, webp + img, format, err := image.Decode(f) + if err != nil { + // Try specific decoders as fallback + f.Seek(0, 0) + img, err = tryDecodeImage(f, inputPath) + if err != nil { + return fmt.Errorf("decoding image (%s): %w", format, err) + } + } + + out, err := os.Create(outputPath) + if err != nil { + return 
fmt.Errorf("creating output: %w", err) + } + defer out.Close() + + target := strings.ToLower(targetFormat) + switch target { + case "png": + return png.Encode(out, img) + case "jpg", "jpeg": + return jpeg.Encode(out, img, &jpeg.Options{Quality: 92}) + case "gif": + return gif.Encode(out, img, &gif.Options{NumColors: 256}) + case "bmp": + return bmp.Encode(out, img) + case "tiff", "tif": + return tiff.Encode(out, img, &tiff.Options{Compression: tiff.Deflate}) + case "webp": + // Go doesn't have a webp encoder in stdlib. Use ffmpeg as fallback. + out.Close() + os.Remove(outputPath) + return convertImageViaFFmpeg(inputPath, outputPath, target) + case "avif": + out.Close() + os.Remove(outputPath) + return convertImageViaFFmpeg(inputPath, outputPath, target) + case "ico": + // ICO is just a small PNG wrapped in ICO container for simple cases. + // We'll convert to PNG via ffmpeg or write a 256x256 PNG for now. + out.Close() + os.Remove(outputPath) + return convertImageViaFFmpeg(inputPath, outputPath, target) + default: + out.Close() + os.Remove(outputPath) + return fmt.Errorf("unsupported image target format: %s", target) + } +} + +func tryDecodeImage(f *os.File, path string) (image.Image, error) { + ext := strings.ToLower(path) + switch { + case strings.HasSuffix(ext, ".webp"): + return webp.Decode(f) + case strings.HasSuffix(ext, ".bmp"): + return bmp.Decode(f) + case strings.HasSuffix(ext, ".tiff"), strings.HasSuffix(ext, ".tif"): + return tiff.Decode(f) + default: + return nil, fmt.Errorf("unable to decode image: %s", path) + } +} + +func convertImageViaFFmpeg(inputPath, outputPath, format string) error { + args := []string{"-y", "-i", inputPath} + + switch format { + case "webp": + args = append(args, "-quality", "90", outputPath) + case "avif": + args = append(args, "-c:v", "libaom-av1", "-still-picture", "1", outputPath) + case "ico": + // Scale to 256x256 for ICO + args = append(args, "-vf", 
// mediaConvert executes ffmpeg with a fully prepared argument list,
// first checking that an ffmpeg binary can be resolved. targetFormat is
// used only to build the actionable error message shown when ffmpeg is
// missing.
//
// NOTE(review): inputPath and outputPath are unused here — callers have
// already baked them into args — and appear to be kept only so the
// signature mirrors the other convert helpers; confirm before removing.
func mediaConvert(inputPath, outputPath, targetFormat string, args []string) error {
	// Fail fast with install guidance rather than an obscure exec error.
	if !ffmpeg.IsAvailable() {
		return fmt.Errorf("ffmpeg is required for %s conversion — run `transmute --install-ffmpeg` to install it", targetFormat)
	}
	return ffmpeg.Run(args...)
}
"avi": + args = append(args, "-codec:v", "mpeg4", "-q:v", "5", + "-codec:a", "libmp3lame", "-q:a", "4", outputPath) + case "mov": + args = append(args, "-codec:v", "libx264", "-preset", "medium", "-crf", "23", + "-codec:a", "aac", "-b:a", "192k", outputPath) + case "mkv": + args = append(args, "-codec:v", "libx264", "-preset", "medium", "-crf", "23", + "-codec:a", "aac", "-b:a", "192k", outputPath) + + default: + // Generic: let ffmpeg figure it out from the extension + args = append(args, outputPath) + } + + return args +} diff --git a/cli/internal/converter/spreadsheet.go b/cli/internal/converter/spreadsheet.go new file mode 100644 index 0000000..a180d2b --- /dev/null +++ b/cli/internal/converter/spreadsheet.go @@ -0,0 +1,161 @@ +package converter + +import ( + "encoding/csv" + "encoding/json" + "fmt" + "os" + "strings" + + "github.com/xuri/excelize/v2" + "gopkg.in/yaml.v3" +) + +func convertSpreadsheet(inputPath, outputPath, sourceExt, targetFormat string) error { + // Read spreadsheet using excelize (supports xlsx, xls via xlsx conversion) + f, err := excelize.OpenFile(inputPath) + if err != nil { + return fmt.Errorf("opening spreadsheet: %w", err) + } + defer f.Close() + + // Get first sheet + sheetName := f.GetSheetName(0) + if sheetName == "" { + return fmt.Errorf("no sheets found in spreadsheet") + } + + rows, err := f.GetRows(sheetName) + if err != nil { + return fmt.Errorf("reading rows: %w", err) + } + + if len(rows) == 0 { + return fmt.Errorf("spreadsheet is empty") + } + + headers := rows[0] + dataRows := rows[1:] + + switch targetFormat { + case "csv": + return writeSpreadsheetDelimited(headers, dataRows, outputPath, ',') + case "tsv": + return writeSpreadsheetDelimited(headers, dataRows, outputPath, '\t') + case "json": + return writeSpreadsheetJSON(headers, dataRows, outputPath) + case "yaml", "yml": + return writeSpreadsheetYAML(headers, dataRows, outputPath) + case "xml": + return writeSpreadsheetXML(headers, dataRows, outputPath) + case "html": + 
return writeSpreadsheetHTML(headers, dataRows, outputPath) + default: + return fmt.Errorf("unsupported target for spreadsheet: %s", targetFormat) + } +} + +func writeSpreadsheetDelimited(headers []string, rows [][]string, outputPath string, sep rune) error { + f, err := os.Create(outputPath) + if err != nil { + return err + } + defer f.Close() + + w := csv.NewWriter(f) + w.Comma = sep + if err := w.Write(headers); err != nil { + return err + } + for _, row := range rows { + // Pad row to match header length + for len(row) < len(headers) { + row = append(row, "") + } + if err := w.Write(row[:len(headers)]); err != nil { + return err + } + } + w.Flush() + return w.Error() +} + +func writeSpreadsheetJSON(headers []string, rows [][]string, outputPath string) error { + var records []map[string]string + for _, row := range rows { + record := make(map[string]string) + for i, h := range headers { + if i < len(row) { + record[h] = row[i] + } else { + record[h] = "" + } + } + records = append(records, record) + } + b, err := json.MarshalIndent(records, "", " ") + if err != nil { + return err + } + return os.WriteFile(outputPath, b, 0o644) +} + +func writeSpreadsheetYAML(headers []string, rows [][]string, outputPath string) error { + var records []map[string]string + for _, row := range rows { + record := make(map[string]string) + for i, h := range headers { + if i < len(row) { + record[h] = row[i] + } else { + record[h] = "" + } + } + records = append(records, record) + } + b, err := yaml.Marshal(records) + if err != nil { + return err + } + return os.WriteFile(outputPath, b, 0o644) +} + +func writeSpreadsheetXML(headers []string, rows [][]string, outputPath string) error { + var sb strings.Builder + sb.WriteString("\n\n") + for _, row := range rows { + sb.WriteString(" \n") + for i, h := range headers { + val := "" + if i < len(row) { + val = row[i] + } + sb.WriteString(fmt.Sprintf(" <%s>%s\n", h, val, h)) + } + sb.WriteString(" \n") + } + sb.WriteString("") + return 
os.WriteFile(outputPath, []byte(sb.String()), 0o644) +} + +func writeSpreadsheetHTML(headers []string, rows [][]string, outputPath string) error { + var sb strings.Builder + sb.WriteString("\n\n\n") + for _, h := range headers { + sb.WriteString("") + } + sb.WriteString("\n\n\n") + for _, row := range rows { + sb.WriteString("") + for i := range headers { + val := "" + if i < len(row) { + val = row[i] + } + sb.WriteString("") + } + sb.WriteString("\n") + } + sb.WriteString("\n
" + h + "
" + val + "
// FileCategory represents the type of file
type FileCategory string

// The broad conversion families the detector sorts file extensions
// into; extensionMap assigns one of these to every known extension.
// CategoryUnknown is presumably returned for extensions absent from
// extensionMap — confirm at the (not visible here) lookup site.
const (
	CategoryImage       FileCategory = "image"
	CategoryDocument    FileCategory = "document" // includes ebooks (epub) and slides (pptx)
	CategoryAudio       FileCategory = "audio"
	CategoryVideo       FileCategory = "video"
	CategoryData        FileCategory = "data" // structured text: json/csv/yaml/toml/…
	CategoryFont        FileCategory = "font"
	CategorySpreadsheet FileCategory = "spreadsheet"
	CategoryUnknown     FileCategory = "unknown"
)
CategoryData,
	"ndjson": CategoryData, "jsonl": CategoryData, "sql": CategoryData,

	// Spreadsheets
	"xlsx": CategorySpreadsheet, "xls": CategorySpreadsheet, "ods": CategorySpreadsheet,

	// Fonts
	"ttf": CategoryFont, "otf": CategoryFont, "woff": CategoryFont, "woff2": CategoryFont,
}

// conversionMap maps each source extension to the list of target formats
// it can be converted to. It mirrors the web app's conversion map exactly;
// keep the two in sync when adding or removing formats.
var conversionMap = map[string][]string{
	// ─── Images ──────────────────────────────────────────────
	"png":  {"jpg", "webp", "gif", "bmp", "avif", "tiff", "ico"},
	"jpg":  {"png", "webp", "gif", "bmp", "avif", "tiff", "ico"},
	"jpeg": {"png", "webp", "gif", "bmp", "avif", "tiff", "ico"},
	"webp": {"png", "jpg", "gif", "bmp", "avif", "tiff", "ico"},
	"gif":  {"png", "jpg", "webp", "bmp", "avif", "tiff"},
	"bmp":  {"png", "jpg", "webp", "gif", "avif", "tiff"},
	"tiff": {"png", "jpg", "webp", "gif", "bmp", "avif"},
	"tif":  {"png", "jpg", "webp", "gif", "bmp", "avif"},
	"avif": {"png", "jpg", "webp", "gif", "bmp", "tiff"},
	"svg":  {"png", "jpg", "webp", "gif", "bmp", "avif", "tiff"},
	"ico":  {"png", "jpg", "webp", "gif", "bmp"},
	"heic": {"png", "jpg", "webp", "gif", "bmp", "avif", "tiff"},
	"heif": {"png", "jpg", "webp", "gif", "bmp", "avif", "tiff"},
	"psd":  {"png", "jpg", "webp", "gif", "bmp", "avif", "tiff", "ico"},

	// ─── Documents ───────────────────────────────────────────
	"pdf":  {"txt", "html", "md", "docx", "epub"},
	"docx": {"pdf", "html", "txt", "md", "epub"},
	"md":   {"html", "pdf", "txt", "docx", "epub", "pptx"},
	"html": {"pdf", "txt", "md", "docx", "epub", "pptx"},
	"htm":  {"pdf", "txt", "md", "docx", "epub", "pptx"},
	"txt":  {"pdf", "html", "md", "docx", "epub", "pptx"},
	"rtf":  {"txt", "html", "md", "pdf", "docx"},
	"epub": {"txt", "html", "md", "pdf"},
	"pptx": {"txt", "html", "pdf", "md"},

	// ─── Audio ───────────────────────────────────────────────
	"mp3":  {"wav", "ogg", "aac", "flac", "m4a", "opus"},
	"wav":  {"mp3", "ogg", "aac", "flac", "m4a", "opus"},
	"flac": {"mp3", "wav", "ogg", "aac", "m4a", "opus"},
	"ogg":  {"mp3", "wav", "aac", "flac", "m4a", "opus"},
	"aac":  {"mp3", "wav", "ogg", "flac", "m4a", "opus"},
	"m4a":  {"mp3", "wav", "ogg", "flac", "aac", "opus"},
	"wma":  {"mp3", "wav", "ogg", "flac", "aac", "m4a"},
	"opus": {"mp3", "wav", "ogg", "flac", "aac", "m4a"},

	// ─── Video (video targets plus audio-track extraction) ───
	"mp4":  {"webm", "avi", "mov", "mkv", "gif", "mp3", "wav", "ogg", "aac", "flac"},
	"webm": {"mp4", "avi", "mov", "mkv", "gif", "mp3", "wav", "ogg", "aac", "flac"},
	"avi":  {"mp4", "webm", "mov", "mkv", "gif", "mp3", "wav", "ogg", "aac", "flac"},
	"mov":  {"mp4", "webm", "avi", "mkv", "gif", "mp3", "wav", "ogg", "aac", "flac"},
	"mkv":  {"mp4", "webm", "avi", "mov", "gif", "mp3", "wav", "ogg", "aac", "flac"},
	"flv":  {"mp4", "webm", "avi", "mov", "mkv", "gif", "mp3", "wav", "ogg", "aac", "flac"},
	"wmv":  {"mp4", "webm", "avi", "mov", "mkv", "gif", "mp3", "wav", "ogg", "aac", "flac"},
	"m4v":  {"mp4", "webm", "avi", "mov", "mkv", "gif", "mp3", "wav", "ogg", "aac", "flac"},

	// ─── Data ────────────────────────────────────────────────
	"csv":        {"json", "xml", "yaml", "tsv", "toml", "xlsx", "ini", "env", "properties", "ndjson", "sql"},
	"json":       {"csv", "xml", "yaml", "tsv", "toml", "xlsx", "ini", "env", "properties", "ndjson", "sql"},
	"xml":        {"json", "csv", "yaml", "tsv", "toml", "xlsx"},
	"yaml":       {"json", "csv", "xml", "tsv", "toml", "xlsx", "ini", "env", "properties", "ndjson", "sql"},
	"yml":        {"json", "csv", "xml", "tsv", "toml", "xlsx", "ini", "env", "properties", "ndjson", "sql"},
	"tsv":        {"csv", "json", "xml", "yaml", "toml", "xlsx", "ndjson", "sql"},
	"toml":       {"json", "csv", "xml", "yaml", "tsv", "xlsx"},
	"ini":        {"json", "yaml", "toml", "env", "properties", "xml", "csv"},
	"env":        {"json", "yaml", "toml", "ini", "properties", "csv"},
	"properties": {"json", "yaml", "toml", "ini", "env", "csv"},
	"ndjson":     {"json", "csv", "tsv", "yaml", "xml", "xlsx", "sql"},
	"jsonl":      {"json", "csv", "tsv", "yaml", "xml", "xlsx", "sql"},
	"sql":        {"json", "csv", "tsv", "yaml", "xlsx"},

	// ─── Spreadsheets ────────────────────────────────────────
	"xlsx": {"csv", "json", "tsv", "xml", "yaml", "toml", "ods", "html", "txt", "ndjson", "sql"},
	"xls":  {"xlsx", "csv", "json", "tsv", "xml", "yaml", "toml", "ods", "html", "txt", "ndjson", "sql"},
	"ods":  {"xlsx", "csv", "json", "tsv", "xml", "yaml", "toml", "html", "txt", "ndjson", "sql"},

	// ─── Fonts ───────────────────────────────────────────────
	"ttf":   {"otf", "woff", "woff2"},
	"otf":   {"ttf", "woff", "woff2"},
	"woff":  {"ttf", "otf", "woff2"},
	"woff2": {"ttf", "otf", "woff"},
}

// defaultTargets maps each extension to its preferred default conversion
// target. Matches the web app's defaults exactly.
var defaultTargets = map[string]string{
	// Images -> WebP (modern, smaller); legacy/exotic formats -> PNG/JPG.
	"png": "webp", "jpg": "webp", "jpeg": "webp", "gif": "webp",
	"bmp": "png", "tiff": "png", "tif": "png", "avif": "png", "svg": "png", "ico": "png",
	"heic": "jpg", "heif": "jpg", "psd": "png",
	// Documents -> PDF (except PDF itself, which defaults to DOCX).
	"docx": "pdf", "md": "html", "html": "pdf", "htm": "pdf", "txt": "pdf",
	"pdf": "docx", "rtf": "docx", "epub": "html", "pptx": "pdf",
	// Audio -> MP3 (MP3 itself defaults to WAV).
	"wav": "mp3", "flac": "mp3", "ogg": "mp3", "aac": "mp3", "m4a": "mp3",
	"wma": "mp3", "opus": "mp3", "mp3": "wav",
	// Video -> MP4 (MP4 itself defaults to WebM).
	"avi": "mp4", "mov": "mp4", "mkv": "mp4", "flv": "mp4", "wmv": "mp4",
	"m4v": "mp4", "mp4": "webm", "webm": "mp4",
	// Data -> JSON.
	"csv": "json", "xml": "json", "yaml": "json", "yml": "json", "tsv": "csv",
	"json": "csv", "toml": "json",
	"ini": "json", "env": "json", "properties": "json",
	"ndjson": "json", "jsonl": "json", "sql": "json",
	// Spreadsheets -> CSV.
	"xlsx": "csv", "xls": "csv", "ods": "csv",
	// Fonts -> WOFF2 (modern web standard).
	"ttf": "woff2", "otf": "woff2", "woff": "woff2", "woff2": "ttf",
}
+ +// DetectCategory returns the category for a given file extension +func DetectCategory(ext string) FileCategory { + if cat, ok := extensionMap[ext]; ok { + return cat + } + return CategoryUnknown +} + +// GetAvailableFormats returns the conversion targets for a given extension +func GetAvailableFormats(ext string) []string { + if fmts, ok := conversionMap[ext]; ok { + return fmts + } + return nil +} + +// GetDefaultTarget returns the preferred default target format for an extension. +func GetDefaultTarget(ext string) string { + if def, ok := defaultTargets[ext]; ok { + return def + } + fmts := GetAvailableFormats(ext) + if len(fmts) > 0 { + return fmts[0] + } + return "" +} + +// IsSupported returns true if the extension is known +func IsSupported(ext string) bool { + _, ok := extensionMap[ext] + return ok +} + +// CategoryLabel returns a human-readable label +func CategoryLabel(cat FileCategory) string { + switch cat { + case CategoryImage: + return "Image" + case CategoryDocument: + return "Document" + case CategoryAudio: + return "Audio" + case CategoryVideo: + return "Video" + case CategoryData: + return "Data" + case CategoryFont: + return "Font" + case CategorySpreadsheet: + return "Spreadsheet" + default: + return "Unknown" + } +} + +// CategoryIcon returns a unicode icon for the category +func CategoryIcon(cat FileCategory) string { + switch cat { + case CategoryImage: + return "\U0001f5bc" // framed picture + case CategoryDocument: + return "\U0001f4c4" // page facing up + case CategoryAudio: + return "\U0001f3b5" // musical note + case CategoryVideo: + return "\U0001f3ac" // clapper board + case CategoryData: + return "\U0001f4ca" // bar chart + case CategoryFont: + return "\U0001f524" // input latin letters + case CategorySpreadsheet: + return "\U0001f4cb" // clipboard + default: + return "\U0001f4c1" // file folder + } +} diff --git a/cli/internal/ffmpeg/ffmpeg.go b/cli/internal/ffmpeg/ffmpeg.go new file mode 100644 index 0000000..8de90ed --- 
/dev/null +++ b/cli/internal/ffmpeg/ffmpeg.go @@ -0,0 +1,330 @@ +package ffmpeg + +import ( + "archive/tar" + "archive/zip" + "compress/gzip" + "errors" + "fmt" + "io" + "net/http" + "os" + "os/exec" + "path/filepath" + "runtime" + "strings" +) + +// cacheDir returns the directory where transmute stores its ffmpeg binary. +// ~/.transmute/bin/ +func cacheDir() (string, error) { + home, err := os.UserHomeDir() + if err != nil { + return "", err + } + return filepath.Join(home, ".transmute", "bin"), nil +} + +// BinaryPath returns the expected path to the ffmpeg binary inside our cache. +func BinaryPath() (string, error) { + dir, err := cacheDir() + if err != nil { + return "", err + } + name := "ffmpeg" + if runtime.GOOS == "windows" { + name = "ffmpeg.exe" + } + return filepath.Join(dir, name), nil +} + +// Resolve returns a usable ffmpeg path. It checks: +// 1. Our managed cache dir +// 2. System PATH +// Returns empty string + error if not found anywhere. +func Resolve() (string, error) { + // Check our cache first + p, err := BinaryPath() + if err == nil { + if _, statErr := os.Stat(p); statErr == nil { + return p, nil + } + } + + // Check system PATH + if sysPath, err := exec.LookPath("ffmpeg"); err == nil { + return sysPath, nil + } + + return "", errors.New("ffmpeg not found — run `transmute --install-ffmpeg` or install it manually") +} + +// IsAvailable returns true if ffmpeg can be resolved. +func IsAvailable() bool { + _, err := Resolve() + return err == nil +} + +// downloadURL returns the URL for a static ffmpeg build for the current platform. +// Uses https://github.com/eugeneware/ffmpeg-static releases (widely used, MIT). 
+func downloadURL() (string, error) { + goos := runtime.GOOS + goarch := runtime.GOARCH + + // Map Go os/arch to ffmpeg-static naming + var platform string + switch { + case goos == "darwin" && goarch == "arm64": + platform = "darwin-arm64" + case goos == "darwin" && goarch == "amd64": + platform = "darwin-x64" + case goos == "linux" && goarch == "amd64": + platform = "linux-x64" + case goos == "linux" && goarch == "arm64": + platform = "linux-arm64" + case goos == "windows" && goarch == "amd64": + platform = "win32-x64" + default: + return "", fmt.Errorf("unsupported platform: %s/%s", goos, goarch) + } + + // Use johnvansickle static builds for linux, evermeet for mac, gyan.dev for windows + switch goos { + case "darwin": + // evermeet.cx provides universal macOS ffmpeg builds + return "https://evermeet.cx/ffmpeg/getrelease/zip", nil + case "linux": + // johnvansickle provides static Linux builds + base := "https://johnvansickle.com/ffmpeg/releases/" + switch goarch { + case "amd64": + return base + "ffmpeg-release-amd64-static.tar.xz", nil + case "arm64": + return base + "ffmpeg-release-arm64-static.tar.xz", nil + } + case "windows": + return "https://www.gyan.dev/ffmpeg/builds/ffmpeg-release-essentials.zip", nil + } + + _ = platform // suppress unused + return "", fmt.Errorf("unsupported platform: %s/%s", goos, goarch) +} + +// Download fetches and installs ffmpeg into ~/.transmute/bin/. +// The progress callback receives bytes downloaded so far. 
+func Download(progress func(downloaded int64)) error { + url, err := downloadURL() + if err != nil { + return err + } + + dir, err := cacheDir() + if err != nil { + return err + } + if err := os.MkdirAll(dir, 0o755); err != nil { + return fmt.Errorf("creating cache dir: %w", err) + } + + binPath, err := BinaryPath() + if err != nil { + return err + } + + // Download to temp file + resp, err := http.Get(url) //nolint:gosec + if err != nil { + return fmt.Errorf("downloading ffmpeg: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("download failed: HTTP %d", resp.StatusCode) + } + + tmpFile, err := os.CreateTemp(dir, "ffmpeg-download-*") + if err != nil { + return fmt.Errorf("creating temp file: %w", err) + } + tmpPath := tmpFile.Name() + defer os.Remove(tmpPath) + + var reader io.Reader = resp.Body + if progress != nil { + reader = &progressReader{r: resp.Body, fn: progress} + } + + if _, err := io.Copy(tmpFile, reader); err != nil { + tmpFile.Close() + return fmt.Errorf("saving download: %w", err) + } + tmpFile.Close() + + // Extract based on file type + switch { + case strings.HasSuffix(url, ".zip"): + if err := extractFromZip(tmpPath, binPath); err != nil { + return err + } + case strings.HasSuffix(url, ".tar.xz"), strings.HasSuffix(url, ".tar.gz"): + if err := extractFromTarball(tmpPath, binPath); err != nil { + return err + } + default: + // Direct binary + if err := os.Rename(tmpPath, binPath); err != nil { + return err + } + } + + // Make executable + return os.Chmod(binPath, 0o755) +} + +func extractFromZip(zipPath, destBin string) error { + r, err := zip.OpenReader(zipPath) + if err != nil { + return fmt.Errorf("opening zip: %w", err) + } + defer r.Close() + + for _, f := range r.File { + name := filepath.Base(f.Name) + if name == "ffmpeg" || name == "ffmpeg.exe" { + rc, err := f.Open() + if err != nil { + return err + } + defer rc.Close() + + out, err := os.Create(destBin) + if err != nil { + return err 
+ } + defer out.Close() + + _, err = io.Copy(out, rc) + return err + } + } + return errors.New("ffmpeg binary not found in zip archive") +} + +func extractFromTarball(tarPath, destBin string) error { + f, err := os.Open(tarPath) + if err != nil { + return err + } + defer f.Close() + + var reader io.Reader + // Try gzip first — xz would need a separate lib, but we'll handle .tar.gz here + gz, err := gzip.NewReader(f) + if err != nil { + // Not gzip — for .tar.xz we'd need an xz decompressor. + // Fallback: try to use system xz command + f.Close() + return extractWithSystemXZ(tarPath, destBin) + } + defer gz.Close() + reader = gz + + tr := tar.NewReader(reader) + for { + hdr, err := tr.Next() + if err == io.EOF { + break + } + if err != nil { + return err + } + name := filepath.Base(hdr.Name) + if name == "ffmpeg" { + out, err := os.Create(destBin) + if err != nil { + return err + } + defer out.Close() + _, err = io.Copy(out, tr) + return err + } + } + return errors.New("ffmpeg binary not found in tarball") +} + +func extractWithSystemXZ(tarPath, destBin string) error { + // Use system xz to decompress, then extract with tar + dir := filepath.Dir(destBin) + cmd := exec.Command("sh", "-c", + fmt.Sprintf("xz -dc %q | tar -xf - -C %q --strip-components=1", tarPath, dir)) + if err := cmd.Run(); err != nil { + return fmt.Errorf("extracting with xz: %w (is xz installed?)", err) + } + + // Look for the ffmpeg binary in the extracted files + extracted := filepath.Join(dir, "ffmpeg") + if _, err := os.Stat(extracted); err == nil { + return nil // Already in the right place + } + + // Walk to find it + var found string + filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return nil + } + if filepath.Base(path) == "ffmpeg" && !info.IsDir() { + found = path + return filepath.SkipAll + } + return nil + }) + + if found == "" { + return errors.New("ffmpeg binary not found after extraction") + } + if found != destBin { + return 
os.Rename(found, destBin) + } + return nil +} + +// Run executes ffmpeg with the given arguments, returning combined output on error. +func Run(args ...string) error { + bin, err := Resolve() + if err != nil { + return err + } + cmd := exec.Command(bin, args...) + output, err := cmd.CombinedOutput() + if err != nil { + return fmt.Errorf("ffmpeg error: %w\n%s", err, string(output)) + } + return nil +} + +// RunWithOutput executes ffmpeg and returns stdout. +func RunWithOutput(args ...string) ([]byte, error) { + bin, err := Resolve() + if err != nil { + return nil, err + } + cmd := exec.Command(bin, args...) + return cmd.CombinedOutput() +} + +// progressReader wraps an io.Reader and reports progress. +type progressReader struct { + r io.Reader + fn func(int64) + total int64 +} + +func (pr *progressReader) Read(p []byte) (int, error) { + n, err := pr.r.Read(p) + pr.total += int64(n) + if pr.fn != nil { + pr.fn(pr.total) + } + return n, err +} diff --git a/cli/internal/theme/theme.go b/cli/internal/theme/theme.go new file mode 100644 index 0000000..81fbc12 --- /dev/null +++ b/cli/internal/theme/theme.go @@ -0,0 +1,180 @@ +package theme + +import ( + "image/color" + "strings" + + "charm.land/lipgloss/v2" +) + +// ─── Pastel palette (matching the web app) ─────────────────── + +var ( + Pink = lipgloss.Color("#f472b6") + Purple = lipgloss.Color("#a78bfa") + Blue = lipgloss.Color("#60a5fa") + Mint = lipgloss.Color("#34d399") + Orange = lipgloss.Color("#fb923c") + Teal = lipgloss.Color("#2dd4bf") + + Cream = lipgloss.Color("#fdf6ef") + Warm = lipgloss.Color("#faf0e6") + Peach = lipgloss.Color("#fce8d5") + + Dark = lipgloss.Color("#2d1f14") + Mid = lipgloss.Color("#8b7355") + Light = lipgloss.Color("#bfa98a") + + Red = lipgloss.Color("#f43f5e") + DimBg = lipgloss.Color("#f6f6f6") + BorderCl = lipgloss.Color("#e8e0d4") + + // Full-screen background + ScreenBg = lipgloss.Color("#fdf6ef") // Cream — matches the web app +) + +// ─── Category colors 
───────────────────────────────────────── + +func CategoryColor(cat string) color.Color { + switch cat { + case "image": + return Pink + case "document": + return Blue + case "audio": + return Purple + case "video": + return Orange + case "data": + return Mint + case "font": + return Teal + case "spreadsheet": + return Mint + default: + return Light + } +} + +// ─── Reusable styles ───────────────────────────────────────── + +var ( + // Title bar + TitleBar = lipgloss.NewStyle(). + Bold(true). + Foreground(Dark). + Background(ScreenBg). + Padding(0, 2) + + // Header / breadcrumb + Breadcrumb = lipgloss.NewStyle(). + Foreground(Mid). + Bold(false) + + BreadcrumbActive = lipgloss.NewStyle(). + Foreground(Dark). + Bold(true) + + // File row + FileName = lipgloss.NewStyle(). + Foreground(Dark). + Bold(true) + + FileSize = lipgloss.NewStyle(). + Foreground(Light) + + ExtBadge = func(c color.Color) lipgloss.Style { + return lipgloss.NewStyle(). + Foreground(c). + Bold(true) + } + + // Status indicators + StatusIdle = lipgloss.NewStyle(). + Foreground(Light). + Italic(true) + + StatusConverting = lipgloss.NewStyle(). + Foreground(Pink). + Bold(true) + + StatusDone = lipgloss.NewStyle(). + Foreground(Mint). + Bold(true) + + StatusError = lipgloss.NewStyle(). + Foreground(Red). + Bold(true) + + // Buttons / actions + ButtonPrimary = lipgloss.NewStyle(). + Foreground(lipgloss.Color("#ffffff")). + Background(Pink). + Bold(true). + Padding(0, 2) + + ButtonSecondary = lipgloss.NewStyle(). + Foreground(lipgloss.Color("#ffffff")). + Background(Mint). + Bold(true). + Padding(0, 2) + + // Progress bar + ProgressFilled = lipgloss.NewStyle(). + Foreground(Pink) + + ProgressEmpty = lipgloss.NewStyle(). + Foreground(BorderCl) + + // Help / footer + Help = lipgloss.NewStyle(). + Foreground(Light). + Italic(true) + + // Cursor / selection + Selected = lipgloss.NewStyle(). + Bold(true). + Foreground(Pink) + + Unselected = lipgloss.NewStyle(). 
+ Foreground(Dark) + + // Divider + Divider = lipgloss.NewStyle(). + Foreground(BorderCl) + + // Logo / branding + Logo = lipgloss.NewStyle(). + Foreground(Pink). + Bold(true) +) + +// PadLine pads a single rendered line to the given width with the screen +// background color. This ensures every line carries the background color +// all the way to the right edge of the terminal. +func PadLine(line string, width int) string { + w := lipgloss.Width(line) + if w >= width { + return line + } + pad := lipgloss.NewStyle(). + Background(ScreenBg). + Render(strings.Repeat(" ", width-w)) + return line + pad +} + +// FillBlankLines returns n blank lines fully painted with the screen +// background color at the given width. +func FillBlankLines(n, width int) string { + if n <= 0 { + return "" + } + blankLine := lipgloss.NewStyle(). + Background(ScreenBg). + Render(strings.Repeat(" ", width)) + lines := make([]string, n) + for i := range lines { + lines[i] = blankLine + } + return strings.Join(lines, "\n") +} diff --git a/cli/internal/tui/keys.go b/cli/internal/tui/keys.go new file mode 100644 index 0000000..626c6f9 --- /dev/null +++ b/cli/internal/tui/keys.go @@ -0,0 +1,78 @@ +package tui + +import "github.com/charmbracelet/bubbles/key" + +// KeyMap defines key bindings for the TUI. +type KeyMap struct { + Up key.Binding + Down key.Binding + Left key.Binding + Right key.Binding + Enter key.Binding + Space key.Binding + Tab key.Binding + Delete key.Binding + SelectAll key.Binding + Convert key.Binding + Quit key.Binding + Help key.Binding + Back key.Binding +} + +// DefaultKeyMap returns the default key bindings. 
+func DefaultKeyMap() KeyMap { + return KeyMap{ + Up: key.NewBinding( + key.WithKeys("up", "k"), + key.WithHelp("↑/k", "up"), + ), + Down: key.NewBinding( + key.WithKeys("down", "j"), + key.WithHelp("↓/j", "down"), + ), + Left: key.NewBinding( + key.WithKeys("left", "h"), + key.WithHelp("←/h", "prev format"), + ), + Right: key.NewBinding( + key.WithKeys("right", "l"), + key.WithHelp("→/l", "next format"), + ), + Enter: key.NewBinding( + key.WithKeys("enter"), + key.WithHelp("enter", "confirm"), + ), + Space: key.NewBinding( + key.WithKeys(" "), + key.WithHelp("space", "toggle select"), + ), + Tab: key.NewBinding( + key.WithKeys("tab"), + key.WithHelp("tab", "cycle format"), + ), + Delete: key.NewBinding( + key.WithKeys("d", "delete", "backspace"), + key.WithHelp("d", "remove file"), + ), + SelectAll: key.NewBinding( + key.WithKeys("a"), + key.WithHelp("a", "select all"), + ), + Convert: key.NewBinding( + key.WithKeys("c"), + key.WithHelp("c", "convert"), + ), + Quit: key.NewBinding( + key.WithKeys("q", "ctrl+c"), + key.WithHelp("q", "quit"), + ), + Help: key.NewBinding( + key.WithKeys("?"), + key.WithHelp("?", "help"), + ), + Back: key.NewBinding( + key.WithKeys("esc"), + key.WithHelp("esc", "back"), + ), + } +} diff --git a/cli/internal/tui/model.go b/cli/internal/tui/model.go new file mode 100644 index 0000000..d16b1b7 --- /dev/null +++ b/cli/internal/tui/model.go @@ -0,0 +1,404 @@ +package tui + +import ( + "fmt" + "os" + "path/filepath" + "strings" + "time" + + "github.com/charmbracelet/bubbles/key" + tea "github.com/charmbracelet/bubbletea" + + "github.com/noauf/transmute-cli/internal/converter" + "github.com/noauf/transmute-cli/internal/detect" +) + +// ─── State machine ─────────────────────────────────────────── + +type state int + +const ( + stateFileList state = iota // Browsing/selecting files + stateConverting // Conversion in progress + stateResults // Showing results +) + +// ─── File entry ────────────────────────────────────────────── + +type 
fileEntry struct {
	path         string
	name         string
	ext          string // lowercase extension without the leading dot
	size         int64
	category     detect.FileCategory
	selected     bool
	targetFormat string   // currently chosen output format
	formats      []string // all available output formats for this extension
	formatIdx    int      // index of targetFormat within formats
	status       string   // "idle", "converting", "done", "error"
	error        string
	outputPath   string
}

// ─── Messages ────────────────────────────────────────────────

// conversionDoneMsg is emitted when the conversion for files[index] ends.
type conversionDoneMsg struct {
	index  int
	result converter.Result
}

type conversionStartMsg struct {
	index int
}

type tickMsg time.Time

// ─── Model ───────────────────────────────────────────────────

// Model is the bubbletea model driving the whole TUI.
type Model struct {
	files     []fileEntry
	cursor    int
	state     state
	keys      KeyMap
	width     int
	height    int
	outputDir string
	showHelp  bool
	scroll    int // scroll offset for the file list

	// Progress tracking
	converting  int
	converted   int
	totalToConv int
	startTime   time.Time
}

// New creates a TUI model from a list of file paths and an output
// directory. Directories are expanded one level deep; unreadable or
// unsupported paths are silently skipped.
func New(paths []string, outputDir string) Model {
	var entries []fileEntry

	for _, p := range paths {
		info, err := os.Stat(p)
		if err != nil {
			continue
		}
		if info.IsDir() {
			entries = append(entries, expandDir(p)...)
		} else if e := makeFileEntry(p, info); e != nil {
			entries = append(entries, *e)
		}
	}

	return Model{
		files:     entries,
		cursor:    0,
		state:     stateFileList,
		keys:      DefaultKeyMap(),
		showHelp:  false,
		outputDir: outputDir,
	}
}

// expandDir returns entries for the supported, non-hidden regular files
// directly inside dir (no recursion).
func expandDir(dir string) []fileEntry {
	var entries []fileEntry
	dirEntries, err := os.ReadDir(dir)
	if err != nil {
		return entries
	}
	for _, de := range dirEntries {
		if de.IsDir() || strings.HasPrefix(de.Name(), ".") {
			continue
		}
		p := filepath.Join(dir, de.Name())
		info, err := de.Info()
		if err != nil {
			continue
		}
		if e := makeFileEntry(p, info); e != nil {
			entries = append(entries, *e)
		}
	}
	return entries
}

// makeFileEntry builds a fileEntry for path, or returns nil when the
// extension is unsupported or has no conversion targets.
func makeFileEntry(path string, info os.FileInfo) *fileEntry {
	ext := strings.ToLower(strings.TrimPrefix(filepath.Ext(path), "."))

	if !detect.IsSupported(ext) {
		return nil
	}

	formats := detect.GetAvailableFormats(ext)
	if len(formats) == 0 {
		return nil
	}

	// Pre-select the smart default target (matches the web app defaults).
	defaultTarget := detect.GetDefaultTarget(ext)
	defaultIdx := 0
	for i, f := range formats {
		if f == defaultTarget {
			defaultIdx = i
			break
		}
	}

	return &fileEntry{
		path:         path,
		name:         info.Name(),
		ext:          ext,
		size:         info.Size(),
		category:     detect.DetectCategory(ext),
		selected:     true, // select everything by default
		targetFormat: defaultTarget,
		formats:      formats,
		formatIdx:    defaultIdx,
		status:       "idle",
	}
}

// ─── Init ────────────────────────────────────────────────────

func (m Model) Init() tea.Cmd {
	return nil
}

// ─── Update ──────────────────────────────────────────────────

// Update is the bubbletea update loop: window sizing, conversion results,
// and keyboard input.
func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
	switch msg := msg.(type) {
	case tea.WindowSizeMsg:
		m.width = msg.Width
		m.height = msg.Height
		return m, nil

	case conversionDoneMsg:
		if msg.index >= 0 && msg.index < len(m.files) {
			if msg.result.Err != nil {
				m.files[msg.index].status = "error"
				m.files[msg.index].error = msg.result.Err.Error()
			} else {
				m.files[msg.index].status = "done"
				m.files[msg.index].outputPath = msg.result.OutputPath
			}
			m.converted++
		}

		// All conversions finished — switch to the results screen.
		if m.converted >= m.totalToConv {
			m.state = stateResults
			return m, nil
		}

		// Otherwise kick off the next one (conversions run sequentially).
		return m, m.convertNext()

	case tea.KeyMsg:
		return m.handleKey(msg)
	}

	return m, nil
}

// handleKey dispatches a key press to the handler for the current state.
func (m Model) handleKey(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
	switch m.state {
	case stateFileList:
		return m.handleFileListKey(msg)
	case stateConverting:
		// Only quitting is allowed while converting.
		if key.Matches(msg, m.keys.Quit) {
			return m, tea.Quit
		}
		return m, nil
	case stateResults:
		return m.handleResultsKey(msg)
	}
	return m, nil
}

// handleFileListKey handles navigation, selection, format cycling and
// conversion start on the file-list screen.
func (m Model) handleFileListKey(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
	switch {
	case key.Matches(msg, m.keys.Quit):
		return m, tea.Quit

	case key.Matches(msg, m.keys.Up):
		if m.cursor > 0 {
			m.cursor--
			m.ensureVisible()
		}

	case key.Matches(msg, m.keys.Down):
		if m.cursor < len(m.files)-1 {
			m.cursor++
			m.ensureVisible()
		}

	case key.Matches(msg, m.keys.Space):
		if len(m.files) > 0 {
			m.files[m.cursor].selected = !m.files[m.cursor].selected
		}

	case key.Matches(msg, m.keys.Left):
		if len(m.files) > 0 {
			f := &m.files[m.cursor]
			if f.formatIdx > 0 {
				f.formatIdx--
				f.targetFormat = f.formats[f.formatIdx]
			}
		}

	case key.Matches(msg, m.keys.Right), key.Matches(msg, m.keys.Tab):
		if len(m.files) > 0 {
			f := &m.files[m.cursor]
			if f.formatIdx < len(f.formats)-1 {
				f.formatIdx++
				f.targetFormat = f.formats[f.formatIdx]
			}
		}

	case key.Matches(msg, m.keys.SelectAll):
		// Toggle: deselect all if everything is selected, otherwise select all.
		allSelected := true
		for _, f := range m.files {
			if !f.selected {
				allSelected = false
				break
			}
		}
		for i := range m.files {
			m.files[i].selected = !allSelected
		}

	case key.Matches(msg, m.keys.Delete):
		if len(m.files) > 0 {
			m.files = append(m.files[:m.cursor], m.files[m.cursor+1:]...)
			if m.cursor >= len(m.files) && m.cursor > 0 {
				m.cursor--
			}
		}

	case key.Matches(msg, m.keys.Convert), key.Matches(msg, m.keys.Enter):
		return m.startConversion()

	case key.Matches(msg, m.keys.Help):
		m.showHelp = !m.showHelp
	}

	return m, nil
}

// handleResultsKey handles input on the results screen: quit, navigate, or
// go back to the file list (resetting per-file status for another round).
func (m Model) handleResultsKey(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
	switch {
	case key.Matches(msg, m.keys.Quit), key.Matches(msg, m.keys.Enter):
		return m, tea.Quit
	case key.Matches(msg, m.keys.Back):
		// Back to the file list so more conversions can be run.
		m.state = stateFileList
		for i := range m.files {
			if m.files[i].status == "done" || m.files[i].status == "error" {
				m.files[i].status = "idle"
				m.files[i].error = ""
				m.files[i].outputPath = ""
			}
		}
		m.converting = 0
		m.converted = 0
		m.totalToConv = 0
	case key.Matches(msg, m.keys.Up):
		if m.cursor > 0 {
			m.cursor--
		}
	case key.Matches(msg, m.keys.Down):
		if m.cursor < len(m.files)-1 {
			m.cursor++
		}
	}
	return m, nil
}

// ─── Conversion logic ────────────────────────────────────────

// startConversion switches to the converting state and starts the first
// conversion; it is a no-op when nothing is selected.
func (m Model) startConversion() (Model, tea.Cmd) {
	selectedCount := 0
	for _, f := range m.files {
		if f.selected {
			selectedCount++
		}
	}
	if selectedCount == 0 {
		return m, nil
	}

	m.state = stateConverting
	m.totalToConv = selectedCount
	m.converted = 0
	m.converting = 0
	m.startTime = time.Now()

	return m, m.convertNext()
}

// convertNext returns a command that converts the next selected idle file,
// or nil when there is nothing left to convert.
func (m Model) convertNext() tea.Cmd {
	for i := range m.files {
		if !m.files[i].selected || m.files[i].status != "idle" {
			continue
		}
		m.files[i].status = "converting"
		// Capture everything the closure needs by value.
		idx := i
		path := m.files[i].path
		target := m.files[i].targetFormat
		outDir := m.outputDir

		return func() tea.Msg {
			result := converter.Convert(path, target, outDir)
			return conversionDoneMsg{index: idx, result: result}
		}
	}
	return nil
}

// ensureVisible adjusts the scroll offset so the cursor row stays on screen.
func (m *Model) ensureVisible() {
	maxVisible := m.maxVisibleFiles()
	if m.cursor < m.scroll {
		m.scroll = m.cursor
	}
	if
m.cursor >= m.scroll+maxVisible { + m.scroll = m.cursor - maxVisible + 1 + } +} + +func (m Model) maxVisibleFiles() int { + available := m.height - 12 // Reserve space for header, footer, borders + if available < 3 { + return 3 + } + return available +} + +// ─── Helpers ───────────────────────────────────────────────── + +func formatSize(bytes int64) string { + const ( + KB = 1024 + MB = KB * 1024 + GB = MB * 1024 + ) + switch { + case bytes >= GB: + return fmt.Sprintf("%.1f GB", float64(bytes)/float64(GB)) + case bytes >= MB: + return fmt.Sprintf("%.1f MB", float64(bytes)/float64(MB)) + case bytes >= KB: + return fmt.Sprintf("%.1f KB", float64(bytes)/float64(KB)) + default: + return fmt.Sprintf("%d B", bytes) + } +} diff --git a/cli/internal/tui/views.go b/cli/internal/tui/views.go new file mode 100644 index 0000000..5712684 --- /dev/null +++ b/cli/internal/tui/views.go @@ -0,0 +1,414 @@ +package tui + +import ( + "fmt" + "strings" + "time" + + "charm.land/lipgloss/v2" + + "github.com/noauf/transmute-cli/internal/detect" + "github.com/noauf/transmute-cli/internal/theme" +) + +// View renders the entire TUI, filling the full terminal. +func (m Model) View() string { + if m.width == 0 || m.height == 0 { + return "Loading..." + } + + var sections []string + + sections = append(sections, m.renderTitleBar()) + sections = append(sections, m.renderDivider()) + + switch m.state { + case stateFileList: + sections = append(sections, m.renderFileList()) + sections = append(sections, m.renderDivider()) + sections = append(sections, m.renderBottomBar()) + case stateConverting: + sections = append(sections, m.renderConverting()) + case stateResults: + sections = append(sections, m.renderResults()) + sections = append(sections, m.renderDivider()) + sections = append(sections, m.renderResultsFooter()) + } + + if m.showHelp { + sections = append(sections, m.renderHelp()) + } + + content := lipgloss.JoinVertical(lipgloss.Left, sections...) 
+ + // Split into individual lines and pad each to full width with background + lines := strings.Split(content, "\n") + for i, line := range lines { + lines[i] = theme.PadLine(line, m.width) + } + + // Fill remaining vertical space with background-colored blank lines + remaining := m.height - len(lines) + if remaining > 0 { + fill := theme.FillBlankLines(remaining, m.width) + return strings.Join(lines, "\n") + "\n" + fill + } + + // Truncate if content exceeds terminal height + if len(lines) > m.height { + lines = lines[:m.height] + } + + return strings.Join(lines, "\n") +} + +// ─── Title bar ─────────────────────────────────────────────── + +func (m Model) renderTitleBar() string { + title := theme.Logo.Render("transmute") + + fileCount := fmt.Sprintf("%d files", len(m.files)) + selected := 0 + for _, f := range m.files { + if f.selected { + selected++ + } + } + info := theme.Breadcrumb.Render(fmt.Sprintf(" %s · %d selected", fileCount, selected)) + + left := title + info + padding := "" + rightContent := theme.Help.Render("? help") + totalWidth := lipgloss.Width(left) + lipgloss.Width(rightContent) + 2 + if m.width > totalWidth { + padding = strings.Repeat(" ", m.width-totalWidth) + } + + return left + padding + rightContent +} + +// ─── Divider ───────────────────────────────────────────────── + +func (m Model) renderDivider() string { + w := m.width + if w <= 0 { + w = 60 + } + return theme.Divider.Render(strings.Repeat("─", w)) +} + +// ─── File list ─────────────────────────────────────────────── + +func (m Model) renderFileList() string { + if len(m.files) == 0 { + empty := lipgloss.NewStyle(). + Foreground(theme.Light). + Italic(true). + Padding(2, 4). + Render("No supported files found. 
Pass file paths or glob patterns as arguments.") + return empty + } + + // Column header + header := renderColumnHeader(m.width) + + maxVisible := m.maxVisibleFiles() + end := m.scroll + maxVisible + if end > len(m.files) { + end = len(m.files) + } + + var rows []string + rows = append(rows, header) + + for i := m.scroll; i < end; i++ { + rows = append(rows, m.renderFileRow(i)) + } + + // Pad with empty rows so the file list always fills the available space + rendered := len(rows) - 1 // subtract header + if rendered < maxVisible { + emptyRow := strings.Repeat(" ", m.width) + for i := rendered; i < maxVisible; i++ { + rows = append(rows, emptyRow) + } + } + + // Scrollbar indicator + if len(m.files) > maxVisible { + scrollInfo := theme.Help.Render(fmt.Sprintf( + " showing %d–%d of %d", m.scroll+1, end, len(m.files))) + rows = append(rows, scrollInfo) + } + + return lipgloss.JoinVertical(lipgloss.Left, rows...) +} + +func renderColumnHeader(width int) string { + nameW := 30 + sizeW := 10 + formatW := 20 + statusW := 12 + + if width > 100 { + nameW = width - sizeW - formatW - statusW - 12 + } + + name := theme.Breadcrumb.Copy().Width(nameW).Render(" Name") + size := theme.Breadcrumb.Copy().Width(sizeW).Align(lipgloss.Right).Render("Size") + format := theme.Breadcrumb.Copy().Width(formatW).Align(lipgloss.Center).Render("Convert to") + status := theme.Breadcrumb.Copy().Width(statusW).Align(lipgloss.Center).Render("Status") + + return name + size + " " + format + " " + status +} + +func (m Model) renderFileRow(idx int) string { + f := m.files[idx] + isCursor := idx == m.cursor + + nameW := 30 + sizeW := 10 + formatW := 20 + statusW := 12 + + if m.width > 100 { + nameW = m.width - sizeW - formatW - statusW - 12 + } + + // Cursor + selection indicator + prefix := " " + if isCursor { + prefix = theme.Selected.Render("> ") + } + + // Checkbox + check := "○" + if f.selected { + check = theme.StatusDone.Render("●") + } + + // Category icon + file name + catColor := 
theme.CategoryColor(string(f.category)) + icon := detect.CategoryIcon(f.category) + nameText := f.name + if len(nameText) > nameW-8 { + nameText = nameText[:nameW-11] + "..." + } + + var nameStyle lipgloss.Style + if isCursor { + nameStyle = theme.FileName.Copy().Bold(true) + } else { + nameStyle = theme.FileName + } + + nameCol := lipgloss.NewStyle().Width(nameW).Render( + prefix + check + " " + icon + " " + theme.ExtBadge(catColor).Render(strings.ToUpper(f.ext)) + " " + nameStyle.Render(nameText)) + + // Size + sizeCol := theme.FileSize.Copy().Width(sizeW).Align(lipgloss.Right).Render(formatSize(f.size)) + + // Format selector + formatStr := renderFormatSelector(f, isCursor) + formatCol := lipgloss.NewStyle().Width(formatW).Align(lipgloss.Center).Render(formatStr) + + // Status + var statusStr string + switch f.status { + case "idle": + statusStr = theme.StatusIdle.Render("idle") + case "converting": + statusStr = theme.StatusConverting.Render("converting...") + case "done": + statusStr = theme.StatusDone.Render("done") + case "error": + statusStr = theme.StatusError.Render("error") + } + statusCol := lipgloss.NewStyle().Width(statusW).Align(lipgloss.Center).Render(statusStr) + + return nameCol + sizeCol + " " + formatCol + " " + statusCol +} + +func renderFormatSelector(f fileEntry, active bool) string { + if len(f.formats) == 0 { + return theme.Help.Render("—") + } + + var parts []string + if active && f.formatIdx > 0 { + parts = append(parts, theme.Help.Render("< ")) + } else { + parts = append(parts, " ") + } + + if active { + parts = append(parts, theme.Selected.Render(f.targetFormat)) + } else { + parts = append(parts, theme.Unselected.Render(f.targetFormat)) + } + + if active && f.formatIdx < len(f.formats)-1 { + parts = append(parts, theme.Help.Render(" >")) + } else { + parts = append(parts, " ") + } + + return strings.Join(parts, "") +} + +// ─── Bottom bar ────────────────────────────────────────────── + +func (m Model) renderBottomBar() string { + 
selected := 0 + for _, f := range m.files { + if f.selected { + selected++ + } + } + + var left string + if selected > 0 { + left = theme.ButtonPrimary.Render(fmt.Sprintf(" Convert %d files [c] ", selected)) + } else { + left = theme.Help.Render("Select files to convert") + } + + right := theme.Help.Render("up/down navigate left/right format space select a all d remove q quit") + + padding := "" + totalWidth := lipgloss.Width(left) + lipgloss.Width(right) + if m.width > totalWidth { + padding = strings.Repeat(" ", m.width-totalWidth) + } + + return left + padding + right +} + +// ─── Converting view ───────────────────────────────────────── + +func (m Model) renderConverting() string { + elapsed := time.Since(m.startTime).Round(time.Millisecond) + + header := theme.StatusConverting.Render(fmt.Sprintf( + " Converting... %d/%d (%s)", m.converted, m.totalToConv, elapsed)) + + // Progress bar + barWidth := m.width - 8 + if barWidth < 20 { + barWidth = 20 + } + progress := float64(m.converted) / float64(m.totalToConv) + filled := int(progress * float64(barWidth)) + if filled > barWidth { + filled = barWidth + } + + bar := " " + + theme.ProgressFilled.Render(strings.Repeat("█", filled)) + + theme.ProgressEmpty.Render(strings.Repeat("░", barWidth-filled)) + + // Show current files being converted + var current []string + for _, f := range m.files { + if f.status == "converting" { + current = append(current, fmt.Sprintf(" %s -> %s", f.name, f.targetFormat)) + } + } + currentStr := theme.Help.Render(strings.Join(current, "\n")) + + return lipgloss.JoinVertical(lipgloss.Left, + "", + header, + bar, + "", + currentStr, + "", + theme.Help.Render(" Press q to cancel"), + ) +} + +// ─── Results view ──────────────────────────────────────────── + +func (m Model) renderResults() string { + var rows []string + + successCount := 0 + errorCount := 0 + for _, f := range m.files { + if !f.selected { + continue + } + if f.status == "done" { + successCount++ + } else if f.status == 
"error" { + errorCount++ + } + } + + elapsed := time.Since(m.startTime).Round(time.Millisecond) + summary := theme.StatusDone.Render(fmt.Sprintf( + " Conversion complete! %d succeeded", successCount)) + if errorCount > 0 { + summary += theme.StatusError.Render(fmt.Sprintf(", %d failed", errorCount)) + } + summary += theme.Help.Render(fmt.Sprintf(" (%s)", elapsed)) + + rows = append(rows, "", summary, "") + + // List results + for _, f := range m.files { + if !f.selected { + continue + } + switch f.status { + case "done": + rows = append(rows, theme.StatusDone.Render(" done ")+ + theme.FileName.Render(f.name)+ + theme.Help.Render(" -> ")+ + theme.BreadcrumbActive.Render(f.outputPath)) + case "error": + rows = append(rows, theme.StatusError.Render(" fail ")+ + theme.FileName.Render(f.name)+ + theme.Help.Render(" -- ")+ + theme.StatusError.Render(f.error)) + } + } + + return lipgloss.JoinVertical(lipgloss.Left, rows...) +} + +func (m Model) renderResultsFooter() string { + return theme.Help.Render(" Press enter to exit | esc to convert more") +} + +// ─── Help overlay ──────────────────────────────────────────── + +func (m Model) renderHelp() string { + keys := []struct { + key string + desc string + }{ + {"up/down, j/k", "Navigate files"}, + {"left/right, h/l", "Change target format"}, + {"space", "Toggle file selection"}, + {"a", "Select / deselect all"}, + {"d", "Remove file from list"}, + {"c or enter", "Start conversion"}, + {"esc", "Go back"}, + {"q or ctrl+c", "Quit"}, + } + + var lines []string + lines = append(lines, "") + lines = append(lines, theme.BreadcrumbActive.Render(" Keyboard Shortcuts")) + lines = append(lines, "") + + for _, k := range keys { + lines = append(lines, fmt.Sprintf(" %s %s", + theme.Selected.Copy().Width(18).Render(k.key), + theme.Help.Render(k.desc))) + } + lines = append(lines, "") + + return lipgloss.JoinVertical(lipgloss.Left, lines...) 
+} diff --git a/cli/internal/update/update.go b/cli/internal/update/update.go new file mode 100644 index 0000000..d9e9d16 --- /dev/null +++ b/cli/internal/update/update.go @@ -0,0 +1,260 @@ +package update + +import ( + "archive/tar" + "compress/gzip" + "encoding/json" + "fmt" + "io" + "net/http" + "os" + "runtime" + "strings" +) + +const ( + // CurrentVersion is the embedded build version. Updated at release time. + CurrentVersion = "0.1.0" + + repoOwner = "noauf" + repoName = "Transmute" + apiURL = "https://api.github.com/repos/" + repoOwner + "/" + repoName + "/releases/latest" +) + +// ghRelease is the subset of the GitHub release JSON we care about. +type ghRelease struct { + TagName string `json:"tag_name"` + Assets []ghAsset `json:"assets"` +} + +type ghAsset struct { + Name string `json:"name"` + BrowserDownloadURL string `json:"browser_download_url"` +} + +// Check queries GitHub for the latest release and reports whether an update +// is available, and if so, which version. +func Check() (available bool, latestVersion string, err error) { + rel, err := fetchLatest() + if err != nil { + return false, "", err + } + latest := strings.TrimPrefix(rel.TagName, "v") + if latest == CurrentVersion { + return false, latest, nil + } + return true, latest, nil +} + +// Run performs a self-update: download the latest release binary and replace +// the current executable. 
+func Run(progress func(string)) error { + progress("Checking for updates...") + + rel, err := fetchLatest() + if err != nil { + return fmt.Errorf("failed to check for updates: %w", err) + } + + latest := strings.TrimPrefix(rel.TagName, "v") + if latest == CurrentVersion { + progress(fmt.Sprintf("Already up to date (v%s)", CurrentVersion)) + return nil + } + + progress(fmt.Sprintf("Update available: v%s -> v%s", CurrentVersion, latest)) + + // Find the matching asset for this OS/arch + assetName := buildAssetName() + var downloadURL string + for _, a := range rel.Assets { + if a.Name == assetName { + downloadURL = a.BrowserDownloadURL + break + } + } + if downloadURL == "" { + return fmt.Errorf("no release binary found for %s/%s (expected %s)", runtime.GOOS, runtime.GOARCH, assetName) + } + + progress(fmt.Sprintf("Downloading %s...", assetName)) + + // Download to a temp file + tmpFile, err := downloadAsset(downloadURL) + if err != nil { + return fmt.Errorf("download failed: %w", err) + } + defer os.Remove(tmpFile) + + // Extract binary from tarball (or use directly if not a tarball) + var binaryPath string + if strings.HasSuffix(assetName, ".tar.gz") { + binaryPath, err = extractTarGz(tmpFile) + if err != nil { + return fmt.Errorf("failed to extract archive: %w", err) + } + defer os.Remove(binaryPath) + } else { + binaryPath = tmpFile + } + + // Replace the current executable + exePath, err := os.Executable() + if err != nil { + return fmt.Errorf("cannot determine executable path: %w", err) + } + + progress("Installing update...") + + if err := replaceExecutable(exePath, binaryPath); err != nil { + return fmt.Errorf("failed to replace executable: %w", err) + } + + progress(fmt.Sprintf("Updated to v%s", latest)) + return nil +} + +func fetchLatest() (*ghRelease, error) { + req, err := http.NewRequest("GET", apiURL, nil) + if err != nil { + return nil, err + } + req.Header.Set("Accept", "application/vnd.github.v3+json") + req.Header.Set("User-Agent", 
"transmute-cli/"+CurrentVersion) + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != 200 { + return nil, fmt.Errorf("GitHub API returned %d", resp.StatusCode) + } + + var rel ghRelease + if err := json.NewDecoder(resp.Body).Decode(&rel); err != nil { + return nil, err + } + return &rel, nil +} + +// buildAssetName returns the expected asset filename for the current platform. +// Convention: transmute--.tar.gz (or .zip for Windows) +func buildAssetName() string { + goos := runtime.GOOS + arch := runtime.GOARCH + + // Normalize arch names + switch arch { + case "amd64": + arch = "x86_64" + case "arm64": + arch = "arm64" + } + + if goos == "windows" { + return fmt.Sprintf("transmute-%s-%s.zip", goos, arch) + } + + return fmt.Sprintf("transmute-%s-%s.tar.gz", goos, arch) +} + +func downloadAsset(url string) (string, error) { + resp, err := http.Get(url) //nolint:gosec + if err != nil { + return "", err + } + defer resp.Body.Close() + + if resp.StatusCode != 200 { + return "", fmt.Errorf("download returned HTTP %d", resp.StatusCode) + } + + tmp, err := os.CreateTemp("", "transmute-update-*") + if err != nil { + return "", err + } + + if _, err := io.Copy(tmp, resp.Body); err != nil { + tmp.Close() + os.Remove(tmp.Name()) + return "", err + } + tmp.Close() + return tmp.Name(), nil +} + +func extractTarGz(archivePath string) (string, error) { + f, err := os.Open(archivePath) + if err != nil { + return "", err + } + defer f.Close() + + gz, err := gzip.NewReader(f) + if err != nil { + return "", err + } + defer gz.Close() + + tr := tar.NewReader(gz) + for { + hdr, err := tr.Next() + if err == io.EOF { + break + } + if err != nil { + return "", err + } + + // Look for the transmute binary + name := hdr.Name + if strings.HasSuffix(name, "/transmute") || name == "transmute" { + tmp, err := os.CreateTemp("", "transmute-bin-*") + if err != nil { + return "", err + } + if _, err := io.Copy(tmp, 
tr); err != nil { + tmp.Close() + os.Remove(tmp.Name()) + return "", err + } + tmp.Close() + if err := os.Chmod(tmp.Name(), 0o755); err != nil { + return "", err + } + return tmp.Name(), nil + } + } + + return "", fmt.Errorf("transmute binary not found in archive") +} + +func replaceExecutable(target, replacement string) error { + // Read the new binary + data, err := os.ReadFile(replacement) + if err != nil { + return err + } + + // Get current executable's permissions + info, err := os.Stat(target) + if err != nil { + return err + } + + // Write new binary to a temp file next to the target + tmpPath := target + ".new" + if err := os.WriteFile(tmpPath, data, info.Mode()); err != nil { + return err + } + + // Atomic rename + if err := os.Rename(tmpPath, target); err != nil { + os.Remove(tmpPath) + return err + } + + return nil +} diff --git a/cli/main.go b/cli/main.go new file mode 100644 index 0000000..8ba2557 --- /dev/null +++ b/cli/main.go @@ -0,0 +1,7 @@ +package main + +import "github.com/noauf/transmute-cli/cmd" + +func main() { + cmd.Execute() +} diff --git a/cli/transmute b/cli/transmute new file mode 100755 index 0000000..3fc7191 Binary files /dev/null and b/cli/transmute differ diff --git a/install.sh b/install.sh new file mode 100755 index 0000000..6230dd4 --- /dev/null +++ b/install.sh @@ -0,0 +1,109 @@ +#!/bin/sh +# Transmute CLI installer +# Usage: curl -fsSL https://raw.githubusercontent.com/noauf/Transmute/main/install.sh | sh +set -e + +REPO="noauf/Transmute" +BINARY="transmute" +INSTALL_DIR="/usr/local/bin" + +# Detect OS +OS="$(uname -s)" +case "$OS" in + Darwin) OS="darwin" ;; + Linux) OS="linux" ;; + MINGW*|MSYS*|CYGWIN*) OS="windows" ;; + *) + echo "Error: unsupported operating system: $OS" + exit 1 + ;; +esac + +# Detect architecture +ARCH="$(uname -m)" +case "$ARCH" in + x86_64|amd64) ARCH="x86_64" ;; + arm64|aarch64) ARCH="arm64" ;; + *) + echo "Error: unsupported architecture: $ARCH" + exit 1 + ;; +esac + +# Determine file extension +if [ 
"$OS" = "windows" ]; then
+  EXT="zip"
+else
+  EXT="tar.gz"
+fi
+
+ASSET="${BINARY}-${OS}-${ARCH}.${EXT}"
+
+# FIX: fail fast if the tools we are about to rely on are missing,
+# instead of dying halfway through with an opaque error.
+if ! command -v curl >/dev/null 2>&1; then
+  echo "Error: curl is required to run this installer"
+  exit 1
+fi
+if [ "$EXT" = "zip" ] && ! command -v unzip >/dev/null 2>&1; then
+  echo "Error: unzip is required to extract ${ASSET}"
+  exit 1
+fi
+
+echo "Transmute CLI installer"
+echo "======================"
+echo ""
+echo " OS: $OS"
+echo " Arch: $ARCH"
+echo ""
+
+# Get latest release tag
+echo "Fetching latest release..."
+TAG=$(curl -fsSL "https://api.github.com/repos/${REPO}/releases/latest" | grep '"tag_name"' | head -1 | sed 's/.*"tag_name": *"\([^"]*\)".*/\1/')
+
+if [ -z "$TAG" ]; then
+  echo "Error: could not determine latest release"
+  exit 1
+fi
+
+echo " Latest version: $TAG"
+
+DOWNLOAD_URL="https://github.com/${REPO}/releases/download/${TAG}/${ASSET}"
+
+echo " Downloading $ASSET..."
+
+# Create temp directory, cleaned up on any exit.
+TMP_DIR="$(mktemp -d)"
+trap 'rm -rf "$TMP_DIR"' EXIT
+
+curl -fsSL "$DOWNLOAD_URL" -o "${TMP_DIR}/${ASSET}"
+
+# Extract
+echo " Extracting..."
+if [ "$EXT" = "tar.gz" ]; then
+  tar -xzf "${TMP_DIR}/${ASSET}" -C "$TMP_DIR"
+else
+  unzip -q "${TMP_DIR}/${ASSET}" -d "$TMP_DIR"
+fi
+
+# Find the binary. FIX: also accept an .exe-suffixed name — the Windows
+# zip presumably ships transmute.exe, which the old lookup never matched.
+# (TODO confirm the actual layout of the Windows release archive.)
+BIN_PATH=""
+for candidate in \
+  "${TMP_DIR}/${BINARY}" \
+  "${TMP_DIR}/${BINARY}.exe" \
+  "${TMP_DIR}/${BINARY}-${OS}-${ARCH}/${BINARY}" \
+  "${TMP_DIR}/${BINARY}-${OS}-${ARCH}/${BINARY}.exe"; do
+  if [ -f "$candidate" ]; then
+    BIN_PATH="$candidate"
+    break
+  fi
+done
+
+if [ -z "$BIN_PATH" ]; then
+  echo "Error: could not find ${BINARY} binary in archive"
+  exit 1
+fi
+
+chmod +x "$BIN_PATH"
+
+# Install
+echo " Installing to ${INSTALL_DIR}/${BINARY}..."
+if [ -w "$INSTALL_DIR" ]; then + mv "$BIN_PATH" "${INSTALL_DIR}/${BINARY}" +else + echo " (requires sudo)" + sudo mv "$BIN_PATH" "${INSTALL_DIR}/${BINARY}" +fi + +echo "" +echo " Installed transmute $TAG to ${INSTALL_DIR}/${BINARY}" +echo "" +echo " Get started:" +echo " transmute *.png Convert all PNGs" +echo " transmute ./files/ Convert all files in a directory" +echo " transmute --help Show all options" +echo "" diff --git a/src/app/page.tsx b/src/app/page.tsx index ae85042..575c83d 100644 --- a/src/app/page.tsx +++ b/src/app/page.tsx @@ -947,6 +947,108 @@ export default function LandingPage() { + {/* ──── CLI / TERMINAL LOVERS ──── */} +
+ + + Terminal Lovers + +

+ Prefer the command line? +

+

+ Transmute has a full-featured CLI with an interactive TUI. Batch convert files, use glob patterns, pipe into scripts. +

+
+ + + {/* Terminal window */} +
+ {/* Terminal title bar */} +
+
+
+
+
+
+
+ Terminal +
+
+ + {/* Terminal body */} +
+ {/* Install command */} +
+ $ +
+ curl -fsSL + https://raw.githubusercontent.com/noauf/Transmute/main/install.sh + | + sh +
+
+ {/* Simulated output */} +
+
Transmute CLI installer
+
OS: darwin Arch: arm64
+
Latest version: v0.1.0
+
Installed transmute v0.1.0
+
+ {/* Usage examples */} +
+ $ + transmute *.png +
+
Convert all PNGs in current directory
+
+ $ + transmute ./photos/ -d ./output/ +
+
Batch convert a whole directory
+
+
+ + + {/* CLI feature bullets */} + + {[ + { label: 'Interactive TUI', color: '#f472b6' }, + { label: 'Glob patterns', color: '#a78bfa' }, + { label: 'Batch convert', color: '#60a5fa' }, + { label: 'Auto-downloads ffmpeg', color: '#fb923c' }, + { label: 'Self-update', color: '#34d399' }, + { label: '70+ formats', color: '#2dd4bf' }, + ].map((feat) => ( + + + {feat.label} + + ))} + +
+ {/* ──── FOOTER CTA ──── */}