feat: add pptx, font, psd, epub converters and context-aware previews
- Wire presentationConverter (pptx read/write via pptxgenjs+jszip) - Add fontConverter (ttf/otf/woff/woff2 via opentype.js + woff2-encoder) - Add PSD support via ag-psd in imageConverter - Add spreadsheetConverter (xlsx/xls/ods via SheetJS) - Add ebookConverter (epub via jszip) - Expand data converter with ini/env/properties/ndjson/jsonl/sql formats - Add context-aware previews for pptx, epub, fonts, and psd in PreviewModal - Remove unsupported .doc extension from fileDetector - Replace Node-only wawoff2 with browser-compatible woff2-encoder
This commit is contained in:
Generated
+222
-2
@@ -10,9 +10,11 @@
|
||||
"dependencies": {
|
||||
"@ffmpeg/ffmpeg": "^0.12.15",
|
||||
"@ffmpeg/util": "^0.12.2",
|
||||
"ag-psd": "^30.1.0",
|
||||
"docx": "^9.6.0",
|
||||
"fast-xml-parser": "^5.4.2",
|
||||
"framer-motion": "^12.35.2",
|
||||
"heic2any": "^0.0.4",
|
||||
"html2canvas-pro": "^2.0.2",
|
||||
"js-yaml": "^4.1.1",
|
||||
"jspdf": "^4.2.0",
|
||||
@@ -20,16 +22,21 @@
|
||||
"mammoth": "^1.11.0",
|
||||
"marked": "^17.0.4",
|
||||
"next": "16.1.6",
|
||||
"opentype.js": "^1.3.4",
|
||||
"papaparse": "^5.5.3",
|
||||
"pdfjs-dist": "^5.5.207",
|
||||
"pptxgenjs": "^4.0.1",
|
||||
"react": "19.2.3",
|
||||
"react-dom": "19.2.3",
|
||||
"smol-toml": "^1.6.0"
|
||||
"smol-toml": "^1.6.0",
|
||||
"woff2-encoder": "^2.0.0",
|
||||
"xlsx": "^0.18.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@tailwindcss/postcss": "^4",
|
||||
"@types/js-yaml": "^4.0.9",
|
||||
"@types/node": "^20",
|
||||
"@types/opentype.js": "^1.3.9",
|
||||
"@types/papaparse": "^5.5.2",
|
||||
"@types/react": "^19",
|
||||
"@types/react-dom": "^19",
|
||||
@@ -1867,6 +1874,13 @@
|
||||
"undici-types": "~6.21.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/opentype.js": {
|
||||
"version": "1.3.9",
|
||||
"resolved": "https://registry.npmjs.org/@types/opentype.js/-/opentype.js-1.3.9.tgz",
|
||||
"integrity": "sha512-KOGywvDPncA4/tTWV5xKNhjpsoSSAHIx3mHOhL5l3XX+c6Xu2dQnHvGs7mRNQsQRte1EqmQ0cPQQ8Z14lkv+yw==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/pako": {
|
||||
"version": "2.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/pako/-/pako-2.0.4.tgz",
|
||||
@@ -2513,6 +2527,25 @@
|
||||
"acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/adler-32": {
|
||||
"version": "1.3.1",
|
||||
"resolved": "https://registry.npmjs.org/adler-32/-/adler-32-1.3.1.tgz",
|
||||
"integrity": "sha512-ynZ4w/nUUv5rrsR8UUGoe1VC9hZj6V5hU9Qw1HlMDJGEJw5S7TfTErWTjMys6M7vr0YWcPqs3qAr4ss0nDfP+A==",
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/ag-psd": {
|
||||
"version": "30.1.0",
|
||||
"resolved": "https://registry.npmjs.org/ag-psd/-/ag-psd-30.1.0.tgz",
|
||||
"integrity": "sha512-1ce6o84aC+oVyl83A35HHUniGjwA3piHmGem3J2odBOFRq5p7i4htaco94vePLfOcingZu4fsyospJLwPagkhg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"base64-js": "1.5.1",
|
||||
"pako": "2.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/ajv": {
|
||||
"version": "6.14.0",
|
||||
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.14.0.tgz",
|
||||
@@ -2987,6 +3020,19 @@
|
||||
"node": ">=10.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/cfb": {
|
||||
"version": "1.2.2",
|
||||
"resolved": "https://registry.npmjs.org/cfb/-/cfb-1.2.2.tgz",
|
||||
"integrity": "sha512-KfdUZsSOw19/ObEWasvBP/Ac4reZvAGauZhs6S/gqNhXhI7cKwvlH7ulj+dOEYnca4bm4SGo8C1bTAQvnTjgQA==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"adler-32": "~1.3.0",
|
||||
"crc-32": "~1.2.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/chalk": {
|
||||
"version": "4.1.2",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
|
||||
@@ -3010,6 +3056,15 @@
|
||||
"integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/codepage": {
|
||||
"version": "1.15.0",
|
||||
"resolved": "https://registry.npmjs.org/codepage/-/codepage-1.15.0.tgz",
|
||||
"integrity": "sha512-3g6NUTPd/YtuuGrhMnOMRjFc+LJw/bnMp3+0r/Wcz3IXUuCosKRJvMphm5+Q+bvTVGcJJuRvVLuYba+WojaFaA==",
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/color-convert": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
||||
@@ -3062,6 +3117,18 @@
|
||||
"integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/crc-32": {
|
||||
"version": "1.2.2",
|
||||
"resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz",
|
||||
"integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==",
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"crc32": "bin/crc32.njs"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/cross-spawn": {
|
||||
"version": "7.0.6",
|
||||
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
|
||||
@@ -4178,6 +4245,15 @@
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/frac": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/frac/-/frac-1.1.2.tgz",
|
||||
"integrity": "sha512-w/XBfkibaTl3YDqASwfDUqkna4Z2p9cFSr1aHDt0WoMTECnRfBOv2WArlZILlqgWlmdIlALXGpM2AOhEk5W3IA==",
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/framer-motion": {
|
||||
"version": "12.35.2",
|
||||
"resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-12.35.2.tgz",
|
||||
@@ -4503,6 +4579,12 @@
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/heic2any": {
|
||||
"version": "0.0.4",
|
||||
"resolved": "https://registry.npmjs.org/heic2any/-/heic2any-0.0.4.tgz",
|
||||
"integrity": "sha512-3lLnZiDELfabVH87htnRolZ2iehX9zwpRyGNz22GKXIu0fznlblf0/ftppXKNqS26dqFSeqfIBhAmAj/uSp0cA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/hermes-estree": {
|
||||
"version": "0.25.1",
|
||||
"resolved": "https://registry.npmjs.org/hermes-estree/-/hermes-estree-0.25.1.tgz",
|
||||
@@ -4547,6 +4629,12 @@
|
||||
"node": ">=16.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/https": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/https/-/https-1.0.0.tgz",
|
||||
"integrity": "sha512-4EC57ddXrkaF0x83Oj8sM6SLQHAWXw90Skqu2M4AEWENZ3F02dFJE/GARA8igO79tcgYqGrD7ae4f5L3um2lgg==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/ignore": {
|
||||
"version": "5.3.2",
|
||||
"resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz",
|
||||
@@ -4557,6 +4645,21 @@
|
||||
"node": ">= 4"
|
||||
}
|
||||
},
|
||||
"node_modules/image-size": {
|
||||
"version": "1.2.1",
|
||||
"resolved": "https://registry.npmjs.org/image-size/-/image-size-1.2.1.tgz",
|
||||
"integrity": "sha512-rH+46sQJ2dlwfjfhCyNx5thzrv+dtmBIhPHk0zgRUukHzZ/kRueTJXoYYsclBaKcSMBWuGbOFXtioLpzTb5euw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"queue": "6.0.2"
|
||||
},
|
||||
"bin": {
|
||||
"image-size": "bin/image-size.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16.x"
|
||||
}
|
||||
},
|
||||
"node_modules/immediate": {
|
||||
"version": "3.0.6",
|
||||
"resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz",
|
||||
@@ -5980,6 +6083,22 @@
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/opentype.js": {
|
||||
"version": "1.3.4",
|
||||
"resolved": "https://registry.npmjs.org/opentype.js/-/opentype.js-1.3.4.tgz",
|
||||
"integrity": "sha512-d2JE9RP/6uagpQAVtJoF0pJJA/fgai89Cc50Yp0EJHk+eLp6QQ7gBoblsnubRULNY132I0J1QKMJ+JTbMqz4sw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"string.prototype.codepointat": "^0.2.1",
|
||||
"tiny-inflate": "^1.0.3"
|
||||
},
|
||||
"bin": {
|
||||
"ot": "bin/ot"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 8.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/option": {
|
||||
"version": "0.2.4",
|
||||
"resolved": "https://registry.npmjs.org/option/-/option-0.2.4.tgz",
|
||||
@@ -6193,6 +6312,27 @@
|
||||
"node": "^10 || ^12 || >=14"
|
||||
}
|
||||
},
|
||||
"node_modules/pptxgenjs": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/pptxgenjs/-/pptxgenjs-4.0.1.tgz",
|
||||
"integrity": "sha512-TeJISr8wouAuXw4C1F/mC33xbZs/FuEG6nH9FG1Zj+nuPcGMP5YRHl6X+j3HSUnS1f3at6k75ZZXPMZlA5Lj9A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/node": "^22.8.1",
|
||||
"https": "^1.0.0",
|
||||
"image-size": "^1.2.1",
|
||||
"jszip": "^3.10.1"
|
||||
}
|
||||
},
|
||||
"node_modules/pptxgenjs/node_modules/@types/node": {
|
||||
"version": "22.19.15",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.15.tgz",
|
||||
"integrity": "sha512-F0R/h2+dsy5wJAUe3tAU6oqa2qbWY5TpNfL/RGmo1y38hiyO1w3x2jPtt76wmuaJI4DQnOBu21cNXQ2STIUUWg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"undici-types": "~6.21.0"
|
||||
}
|
||||
},
|
||||
"node_modules/prelude-ls": {
|
||||
"version": "1.2.1",
|
||||
"resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
|
||||
@@ -6231,6 +6371,15 @@
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/queue": {
|
||||
"version": "6.0.2",
|
||||
"resolved": "https://registry.npmjs.org/queue/-/queue-6.0.2.tgz",
|
||||
"integrity": "sha512-iHZWu+q3IdFZFX36ro/lKBkSvfkztY5Y7HMiPlOUjhupPcG2JMfst2KKEpu5XndviX/3UhFbRngUPNKtgvtZiA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"inherits": "~2.0.3"
|
||||
}
|
||||
},
|
||||
"node_modules/queue-microtask": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
|
||||
@@ -6773,6 +6922,18 @@
|
||||
"integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==",
|
||||
"license": "BSD-3-Clause"
|
||||
},
|
||||
"node_modules/ssf": {
|
||||
"version": "0.11.2",
|
||||
"resolved": "https://registry.npmjs.org/ssf/-/ssf-0.11.2.tgz",
|
||||
"integrity": "sha512-+idbmIXoYET47hH+d7dfm2epdOMUDjqcB4648sTZ+t2JwoyBFL/insLfB/racrDmsKB3diwsDA696pZMieAC5g==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"frac": "~1.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/stable-hash": {
|
||||
"version": "0.0.5",
|
||||
"resolved": "https://registry.npmjs.org/stable-hash/-/stable-hash-0.0.5.tgz",
|
||||
@@ -6813,6 +6974,12 @@
|
||||
"safe-buffer": "~5.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/string.prototype.codepointat": {
|
||||
"version": "0.2.1",
|
||||
"resolved": "https://registry.npmjs.org/string.prototype.codepointat/-/string.prototype.codepointat-0.2.1.tgz",
|
||||
"integrity": "sha512-2cBVCj6I4IOvEnjgO/hWqXjqBGsY+zwPmHl12Srk9IXSZ56Jwwmy+66XO5Iut/oQVR7t5ihYdLB0GMa4alEUcg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/string.prototype.includes": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/string.prototype.includes/-/string.prototype.includes-2.0.1.tgz",
|
||||
@@ -7050,6 +7217,12 @@
|
||||
"utrie": "^1.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/tiny-inflate": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/tiny-inflate/-/tiny-inflate-1.0.3.tgz",
|
||||
"integrity": "sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/tinyglobby": {
|
||||
"version": "0.2.15",
|
||||
"resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz",
|
||||
@@ -7314,7 +7487,6 @@
|
||||
"version": "6.21.0",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
|
||||
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/unrs-resolver": {
|
||||
@@ -7513,6 +7685,33 @@
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/wmf": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/wmf/-/wmf-1.0.2.tgz",
|
||||
"integrity": "sha512-/p9K7bEh0Dj6WbXg4JG0xvLQmIadrner1bi45VMJTfnbVHsc7yIajZyoSoK60/dtVBs12Fm6WkUI5/3WAVsNMw==",
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/woff2-encoder": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/woff2-encoder/-/woff2-encoder-2.0.0.tgz",
|
||||
"integrity": "sha512-wH1rcskczCP5+BJABM88aM3hoEX1VUW6U+Ksx/7bmXvctWGqF+O/YQdC6vWQ7lJS7ny21a0MkAqcZyOP2O8CAg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 18.x"
|
||||
}
|
||||
},
|
||||
"node_modules/word": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/word/-/word-0.3.0.tgz",
|
||||
"integrity": "sha512-OELeY0Q61OXpdUfTp+oweA/vtLVg5VDOXh+3he3PNzLGG/y0oylSOC1xRVj0+l4vQ3tj/bB1HVHv1ocXkQceFA==",
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/word-wrap": {
|
||||
"version": "1.2.5",
|
||||
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz",
|
||||
@@ -7523,6 +7722,27 @@
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/xlsx": {
|
||||
"version": "0.18.5",
|
||||
"resolved": "https://registry.npmjs.org/xlsx/-/xlsx-0.18.5.tgz",
|
||||
"integrity": "sha512-dmg3LCjBPHZnQp5/F/+nnTa+miPJxUXB6vtk42YjBBKayDNagxGEeIdWApkYPOf3Z3pm3k62Knjzp7lMeTEtFQ==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"adler-32": "~1.3.0",
|
||||
"cfb": "~1.2.1",
|
||||
"codepage": "~1.15.0",
|
||||
"crc-32": "~1.2.1",
|
||||
"ssf": "~0.11.2",
|
||||
"wmf": "~1.0.1",
|
||||
"word": "~0.3.0"
|
||||
},
|
||||
"bin": {
|
||||
"xlsx": "bin/xlsx.njs"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/xml": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/xml/-/xml-1.0.1.tgz",
|
||||
|
||||
+8
-1
@@ -11,9 +11,11 @@
|
||||
"dependencies": {
|
||||
"@ffmpeg/ffmpeg": "^0.12.15",
|
||||
"@ffmpeg/util": "^0.12.2",
|
||||
"ag-psd": "^30.1.0",
|
||||
"docx": "^9.6.0",
|
||||
"fast-xml-parser": "^5.4.2",
|
||||
"framer-motion": "^12.35.2",
|
||||
"heic2any": "^0.0.4",
|
||||
"html2canvas-pro": "^2.0.2",
|
||||
"js-yaml": "^4.1.1",
|
||||
"jspdf": "^4.2.0",
|
||||
@@ -21,16 +23,21 @@
|
||||
"mammoth": "^1.11.0",
|
||||
"marked": "^17.0.4",
|
||||
"next": "16.1.6",
|
||||
"opentype.js": "^1.3.4",
|
||||
"papaparse": "^5.5.3",
|
||||
"pdfjs-dist": "^5.5.207",
|
||||
"pptxgenjs": "^4.0.1",
|
||||
"react": "19.2.3",
|
||||
"react-dom": "19.2.3",
|
||||
"smol-toml": "^1.6.0"
|
||||
"smol-toml": "^1.6.0",
|
||||
"woff2-encoder": "^2.0.0",
|
||||
"xlsx": "^0.18.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@tailwindcss/postcss": "^4",
|
||||
"@types/js-yaml": "^4.0.9",
|
||||
"@types/node": "^20",
|
||||
"@types/opentype.js": "^1.3.9",
|
||||
"@types/papaparse": "^5.5.2",
|
||||
"@types/react": "^19",
|
||||
"@types/react-dom": "^19",
|
||||
|
||||
+707
-42
@@ -11,18 +11,12 @@ interface PreviewModalProps {
|
||||
onDownload: (file: UploadedFile) => void;
|
||||
}
|
||||
|
||||
/** Formats we can show as text in a <pre> block */
|
||||
const TEXT_FORMATS = new Set([
|
||||
'txt', 'md', 'json', 'csv', 'tsv', 'xml', 'yaml', 'yml', 'toml',
|
||||
'ini', 'env', 'properties', 'ndjson', 'jsonl', 'sql', 'rst', 'tex',
|
||||
]);
|
||||
|
||||
/** Formats that render in an <iframe> */
|
||||
const IFRAME_FORMATS = new Set(['pdf', 'html', 'htm']);
|
||||
/* ─── Format classification ─── */
|
||||
|
||||
/** Image formats for <img> */
|
||||
const IMAGE_FORMATS = new Set([
|
||||
'png', 'jpg', 'jpeg', 'webp', 'gif', 'bmp', 'avif', 'svg', 'ico', 'tiff', 'tif',
|
||||
'heic', 'heif', 'psd',
|
||||
]);
|
||||
|
||||
/** Audio formats for <audio> */
|
||||
@@ -31,23 +25,371 @@ const AUDIO_FORMATS = new Set(['mp3', 'wav', 'ogg', 'aac', 'flac', 'm4a', 'opus'
|
||||
/** Video formats for <video> */
|
||||
const VIDEO_FORMATS = new Set(['mp4', 'webm', 'mov', 'avi', 'mkv']);
|
||||
|
||||
function getPreviewType(format: string): 'text' | 'iframe' | 'image' | 'audio' | 'video' | 'none' {
|
||||
/** Formats rendered via iframe (PDF natively, HTML as-is) */
|
||||
const IFRAME_FORMATS = new Set(['pdf', 'html', 'htm']);
|
||||
|
||||
type PreviewKind =
|
||||
| 'image'
|
||||
| 'audio'
|
||||
| 'video'
|
||||
| 'iframe' // pdf, html
|
||||
| 'markdown' // .md → rendered HTML
|
||||
| 'json' // syntax-highlighted JSON
|
||||
| 'csv' // table view via papaparse
|
||||
| 'tsv' // table view via papaparse
|
||||
| 'xml' // syntax-highlighted XML
|
||||
| 'yaml' // syntax-highlighted YAML
|
||||
| 'toml' // syntax-highlighted TOML
|
||||
| 'docx' // mammoth → HTML
|
||||
| 'rtf' // strip RTF → text
|
||||
| 'spreadsheet' // xlsx/xls/ods → table via SheetJS
|
||||
| 'pptx' // extract slides text
|
||||
| 'epub' // extract content from epub zip
|
||||
| 'font' // font preview via opentype.js
|
||||
| 'plaintext' // .txt, .ini, .env, .properties, .ndjson, .jsonl, .sql, etc.
|
||||
| 'none';
|
||||
|
||||
const PLAINTEXT_FORMATS = new Set([
|
||||
'txt', 'ini', 'env', 'properties', 'ndjson', 'jsonl', 'sql', 'rst', 'tex', 'log',
|
||||
]);
|
||||
|
||||
function getPreviewKind(format: string): PreviewKind {
|
||||
if (IMAGE_FORMATS.has(format)) return 'image';
|
||||
if (IFRAME_FORMATS.has(format)) return 'iframe';
|
||||
if (AUDIO_FORMATS.has(format)) return 'audio';
|
||||
if (VIDEO_FORMATS.has(format)) return 'video';
|
||||
if (TEXT_FORMATS.has(format)) return 'text';
|
||||
if (IFRAME_FORMATS.has(format)) return 'iframe';
|
||||
if (format === 'md') return 'markdown';
|
||||
if (format === 'json') return 'json';
|
||||
if (format === 'csv') return 'csv';
|
||||
if (format === 'tsv') return 'tsv';
|
||||
if (format === 'xml') return 'xml';
|
||||
if (format === 'yaml' || format === 'yml') return 'yaml';
|
||||
if (format === 'toml') return 'toml';
|
||||
if (format === 'docx') return 'docx';
|
||||
if (format === 'rtf') return 'rtf';
|
||||
if (format === 'xlsx' || format === 'xls' || format === 'ods') return 'spreadsheet';
|
||||
if (format === 'pptx') return 'pptx';
|
||||
if (format === 'epub') return 'epub';
|
||||
if (format === 'ttf' || format === 'otf' || format === 'woff' || format === 'woff2') return 'font';
|
||||
if (PLAINTEXT_FORMATS.has(format)) return 'plaintext';
|
||||
return 'none';
|
||||
}
|
||||
|
||||
/* ─── Syntax highlighting helpers (no external deps) ─── */
|
||||
|
||||
function escapeHtml(str: string): string {
|
||||
return str.replace(/&/g, '&').replace(/</g, '<').replace(/>/g, '>').replace(/"/g, '"');
|
||||
}
|
||||
|
||||
function highlightJson(raw: string): string {
|
||||
try {
|
||||
// Pretty-print first
|
||||
const obj = JSON.parse(raw);
|
||||
const pretty = JSON.stringify(obj, null, 2);
|
||||
// Tokenize with regex
|
||||
return pretty.replace(
|
||||
/("(?:\\.|[^"\\])*")\s*:/g, // keys
|
||||
'<span style="color:#9333ea">$1</span>:'
|
||||
).replace(
|
||||
/:\s*("(?:\\.|[^"\\])*")/g, // string values
|
||||
': <span style="color:#059669">$1</span>'
|
||||
).replace(
|
||||
/:\s*(\d+\.?\d*)/g, // numbers
|
||||
': <span style="color:#d97706">$1</span>'
|
||||
).replace(
|
||||
/:\s*(true|false)/g, // booleans
|
||||
': <span style="color:#2563eb">$1</span>'
|
||||
).replace(
|
||||
/:\s*(null)/g, // null
|
||||
': <span style="color:#9ca3af">$1</span>'
|
||||
);
|
||||
} catch {
|
||||
return escapeHtml(raw);
|
||||
}
|
||||
}
|
||||
|
||||
function highlightXml(raw: string): string {
|
||||
const escaped = escapeHtml(raw);
|
||||
return escaped
|
||||
// Tags: <tagName ... >
|
||||
.replace(/<(\/?)([\w:.-]+)/g, '<$1<span style="color:#9333ea">$2</span>')
|
||||
// Attributes: key="value"
|
||||
.replace(/([\w:.-]+)="([^&]*)"/g,
|
||||
'<span style="color:#d97706">$1</span>="<span style="color:#059669">$2</span>"')
|
||||
// Comments
|
||||
.replace(/(<!--[\s\S]*?-->)/g, '<span style="color:#9ca3af">$1</span>');
|
||||
}
|
||||
|
||||
function highlightYaml(raw: string): string {
|
||||
return escapeHtml(raw)
|
||||
.split('\n')
|
||||
.map(line => {
|
||||
// Comments
|
||||
if (/^\s*#/.test(line)) return `<span style="color:#9ca3af">${line}</span>`;
|
||||
// Key: value
|
||||
const match = line.match(/^(\s*)([\w.-]+)(\s*:\s*)(.*)/);
|
||||
if (match) {
|
||||
const [, indent, key, colon, val] = match;
|
||||
let valHtml = val;
|
||||
if (/^(true|false)$/i.test(val)) valHtml = `<span style="color:#2563eb">${val}</span>`;
|
||||
else if (/^-?\d+\.?\d*$/.test(val)) valHtml = `<span style="color:#d97706">${val}</span>`;
|
||||
else if (/^["'].*["']$/.test(val)) valHtml = `<span style="color:#059669">${val}</span>`;
|
||||
else if (val === 'null' || val === '~') valHtml = `<span style="color:#9ca3af">${val}</span>`;
|
||||
else if (val) valHtml = `<span style="color:#059669">${val}</span>`;
|
||||
return `${indent}<span style="color:#9333ea">${key}</span>${colon}${valHtml}`;
|
||||
}
|
||||
// List items
|
||||
if (/^\s*-\s/.test(line)) {
|
||||
return line.replace(/^(\s*-\s+)(.*)/, '$1<span style="color:#059669">$2</span>');
|
||||
}
|
||||
return line;
|
||||
})
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
function highlightToml(raw: string): string {
|
||||
return escapeHtml(raw)
|
||||
.split('\n')
|
||||
.map(line => {
|
||||
// Comments
|
||||
if (/^\s*#/.test(line)) return `<span style="color:#9ca3af">${line}</span>`;
|
||||
// Section headers [section]
|
||||
if (/^\s*\[/.test(line)) return `<span style="color:#2563eb;font-weight:600">${line}</span>`;
|
||||
// Key = value
|
||||
const match = line.match(/^(\s*)([\w.-]+)(\s*=\s*)(.*)/);
|
||||
if (match) {
|
||||
const [, indent, key, eq, val] = match;
|
||||
let valHtml = val;
|
||||
if (/^(true|false)$/i.test(val)) valHtml = `<span style="color:#2563eb">${val}</span>`;
|
||||
else if (/^-?\d+\.?\d*$/.test(val)) valHtml = `<span style="color:#d97706">${val}</span>`;
|
||||
else if (/^".*"$/.test(val)) valHtml = `<span style="color:#059669">${val}</span>`;
|
||||
else if (val) valHtml = `<span style="color:#059669">${val}</span>`;
|
||||
return `${indent}<span style="color:#9333ea">${key}</span>${eq}${valHtml}`;
|
||||
}
|
||||
return line;
|
||||
})
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
function stripRtf(raw: string): string {
|
||||
// Basic RTF stripping: remove RTF control words and groups, extract text
|
||||
let result = raw
|
||||
.replace(/\\par[d]?/g, '\n')
|
||||
.replace(/\{\\[^{}]*\}/g, '') // remove groups like {\fonttbl ...}
|
||||
.replace(/\\[a-z]+\d*\s?/gi, '') // remove control words like \b, \fs24
|
||||
.replace(/[{}]/g, '') // remove remaining braces
|
||||
.replace(/\\'([0-9a-fA-F]{2})/g, (_m, hex) => String.fromCharCode(parseInt(hex, 16)))
|
||||
.trim();
|
||||
// Clean up excessive newlines
|
||||
result = result.replace(/\n{3,}/g, '\n\n');
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ─── CSV/TSV table renderer ─── */
|
||||
|
||||
function CsvTable({ text, delimiter }: { text: string; delimiter: string }) {
|
||||
const [rows, setRows] = useState<string[][]>([]);
|
||||
const [hasHeader, setHasHeader] = useState(true);
|
||||
|
||||
useEffect(() => {
|
||||
// Lazy-load papaparse
|
||||
import('papaparse').then((Papa) => {
|
||||
const result = Papa.default.parse(text, {
|
||||
delimiter: delimiter === '\t' ? '\t' : delimiter,
|
||||
skipEmptyLines: true,
|
||||
});
|
||||
setRows(result.data as string[][]);
|
||||
});
|
||||
}, [text, delimiter]);
|
||||
|
||||
if (rows.length === 0) return <div className="p-4 text-text-light font-mono text-sm">Parsing...</div>;
|
||||
|
||||
const headerRow = hasHeader ? rows[0] : null;
|
||||
const bodyRows = hasHeader ? rows.slice(1) : rows;
|
||||
const maxCols = Math.max(...rows.map(r => r.length));
|
||||
|
||||
return (
|
||||
<div className="p-4">
|
||||
<div className="flex items-center gap-2 mb-3">
|
||||
<span className="font-mono text-[11px] text-text-light">{rows.length} rows x {maxCols} cols</span>
|
||||
<button
|
||||
className="font-mono text-[11px] px-2 py-0.5 rounded border border-border-soft text-text-mid hover:bg-bg-warm transition-colors"
|
||||
onClick={() => setHasHeader(!hasHeader)}
|
||||
>
|
||||
{hasHeader ? 'Headers: ON' : 'Headers: OFF'}
|
||||
</button>
|
||||
</div>
|
||||
<div className="overflow-auto max-h-[65vh] rounded-xl border border-border-soft">
|
||||
<table className="w-full border-collapse bg-white text-[13px]">
|
||||
{headerRow && (
|
||||
<thead>
|
||||
<tr>
|
||||
<th className="px-3 py-2 text-left font-mono text-[11px] font-bold text-text-light bg-bg-warm border-b border-border-soft w-10">#</th>
|
||||
{headerRow.map((cell, i) => (
|
||||
<th
|
||||
key={i}
|
||||
className="px-3 py-2 text-left font-mono text-[11px] font-bold text-purple bg-bg-warm border-b border-border-soft whitespace-nowrap"
|
||||
>
|
||||
{cell}
|
||||
</th>
|
||||
))}
|
||||
</tr>
|
||||
</thead>
|
||||
)}
|
||||
<tbody>
|
||||
{bodyRows.slice(0, 500).map((row, ri) => (
|
||||
<tr key={ri} className={ri % 2 === 0 ? 'bg-white' : 'bg-bg-cream/40'}>
|
||||
<td className="px-3 py-1.5 font-mono text-[11px] text-text-light border-b border-border-soft/50 w-10">{ri + 1}</td>
|
||||
{Array.from({ length: maxCols }, (_, ci) => (
|
||||
<td
|
||||
key={ci}
|
||||
className="px-3 py-1.5 font-mono text-[12px] text-text-dark border-b border-border-soft/50 whitespace-nowrap max-w-[300px] truncate"
|
||||
title={row[ci] || ''}
|
||||
>
|
||||
{row[ci] || ''}
|
||||
</td>
|
||||
))}
|
||||
</tr>
|
||||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
{bodyRows.length > 500 && (
|
||||
<div className="text-center py-2 font-mono text-[11px] text-text-light bg-bg-warm border-t border-border-soft">
|
||||
Showing 500 of {bodyRows.length} rows
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
/* ─── Rendered HTML viewer (for Markdown, DOCX) ─── */
|
||||
|
||||
function RenderedHtmlFrame({ html, title }: { html: string; title: string }) {
|
||||
const iframeRef = useRef<HTMLIFrameElement>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (!iframeRef.current) return;
|
||||
const doc = iframeRef.current.contentDocument;
|
||||
if (!doc) return;
|
||||
|
||||
// Build a styled HTML page inside the iframe
|
||||
doc.open();
|
||||
doc.write(`<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<style>
|
||||
*, *::before, *::after { box-sizing: border-box; }
|
||||
body {
|
||||
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif;
|
||||
line-height: 1.7;
|
||||
color: #2d1f14;
|
||||
padding: 24px 32px;
|
||||
margin: 0;
|
||||
max-width: 720px;
|
||||
font-size: 15px;
|
||||
background: #fffcf8;
|
||||
}
|
||||
h1, h2, h3, h4, h5, h6 {
|
||||
font-family: Georgia, 'Times New Roman', serif;
|
||||
color: #2d1f14;
|
||||
margin: 1.4em 0 0.6em;
|
||||
line-height: 1.3;
|
||||
}
|
||||
h1 { font-size: 1.8em; border-bottom: 2px solid rgba(180,140,100,0.15); padding-bottom: 8px; }
|
||||
h2 { font-size: 1.4em; border-bottom: 1px solid rgba(180,140,100,0.1); padding-bottom: 6px; }
|
||||
h3 { font-size: 1.2em; }
|
||||
p { margin: 0.8em 0; }
|
||||
a { color: #9333ea; text-decoration: underline; }
|
||||
code {
|
||||
font-family: 'JetBrains Mono', 'Fira Code', monospace;
|
||||
background: rgba(180,140,100,0.08);
|
||||
padding: 2px 5px;
|
||||
border-radius: 4px;
|
||||
font-size: 0.9em;
|
||||
}
|
||||
pre {
|
||||
background: #2d1f14;
|
||||
color: #f5e6d3;
|
||||
padding: 16px 20px;
|
||||
border-radius: 10px;
|
||||
overflow-x: auto;
|
||||
font-size: 13px;
|
||||
line-height: 1.5;
|
||||
}
|
||||
pre code {
|
||||
background: none;
|
||||
padding: 0;
|
||||
color: inherit;
|
||||
}
|
||||
blockquote {
|
||||
border-left: 3px solid #a78bfa;
|
||||
margin: 1em 0;
|
||||
padding: 8px 16px;
|
||||
background: rgba(167,139,250,0.06);
|
||||
color: #7a6552;
|
||||
}
|
||||
ul, ol { padding-left: 24px; }
|
||||
li { margin: 4px 0; }
|
||||
table { border-collapse: collapse; width: 100%; margin: 1em 0; }
|
||||
th, td { border: 1px solid rgba(180,140,100,0.15); padding: 8px 12px; text-align: left; }
|
||||
th { background: rgba(180,140,100,0.06); font-weight: 600; }
|
||||
img { max-width: 100%; height: auto; border-radius: 8px; }
|
||||
hr { border: none; height: 1px; background: rgba(180,140,100,0.15); margin: 1.5em 0; }
|
||||
</style>
|
||||
</head>
|
||||
<body>${html}</body>
|
||||
</html>`);
|
||||
doc.close();
|
||||
}, [html]);
|
||||
|
||||
return (
|
||||
<iframe
|
||||
ref={iframeRef}
|
||||
className="w-full h-[70vh] border-none"
|
||||
title={title}
|
||||
sandbox="allow-same-origin"
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
/* ─── Syntax-highlighted code block ─── */
|
||||
|
||||
function HighlightedCode({ html, label }: { html: string; label: string }) {
|
||||
return (
|
||||
<div className="p-4">
|
||||
<div className="flex items-center gap-2 mb-3">
|
||||
<span className="inline-flex items-center px-2 py-0.5 font-mono text-[10px] font-bold uppercase tracking-widest rounded bg-purple/10 text-purple border border-purple/15">
|
||||
{label}
|
||||
</span>
|
||||
</div>
|
||||
<pre
|
||||
className="w-full p-5 bg-white rounded-xl border border-border-soft font-mono text-[13px] leading-relaxed overflow-auto max-h-[65vh] whitespace-pre-wrap break-words"
|
||||
dangerouslySetInnerHTML={{ __html: html }}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
/* ─── Main component ─── */
|
||||
|
||||
export function PreviewModal({ file, onClose, onDownload }: PreviewModalProps) {
|
||||
const [textContent, setTextContent] = useState<string | null>(null);
|
||||
const [renderedContent, setRenderedContent] = useState<{
|
||||
kind: 'html' | 'highlighted' | 'table' | 'spreadsheet' | 'plaintext' | 'font';
|
||||
html?: string;
|
||||
text?: string;
|
||||
label?: string;
|
||||
delimiter?: string;
|
||||
sheetData?: { headers: string[]; rows: string[][] };
|
||||
fontDataUrl?: string;
|
||||
} | null>(null);
|
||||
const [blobUrl, setBlobUrl] = useState<string | null>(null);
|
||||
const [loading, setLoading] = useState(true);
|
||||
const overlayRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
const targetFormat = file?.targetFormat || '';
|
||||
const previewType = getPreviewType(targetFormat);
|
||||
const previewKind = getPreviewKind(targetFormat);
|
||||
|
||||
// Load preview content
|
||||
useEffect(() => {
|
||||
@@ -56,40 +398,273 @@ export function PreviewModal({ file, onClose, onDownload }: PreviewModalProps) {
|
||||
return;
|
||||
}
|
||||
|
||||
let cancelled = false;
|
||||
setLoading(true);
|
||||
setTextContent(null);
|
||||
setRenderedContent(null);
|
||||
|
||||
const type = getPreviewType(file.targetFormat);
|
||||
const kind = getPreviewKind(file.targetFormat);
|
||||
|
||||
if (type === 'text') {
|
||||
// Read blob as text
|
||||
const reader = new FileReader();
|
||||
reader.onload = (e) => {
|
||||
setTextContent(e.target?.result as string);
|
||||
setLoading(false);
|
||||
};
|
||||
reader.onerror = () => {
|
||||
setTextContent('[Failed to read file content]');
|
||||
setLoading(false);
|
||||
};
|
||||
reader.readAsText(file.convertedBlob);
|
||||
} else if (type === 'image' || type === 'iframe' || type === 'audio' || type === 'video') {
|
||||
if (kind === 'image' || kind === 'iframe' || kind === 'audio' || kind === 'video') {
|
||||
const url = URL.createObjectURL(file.convertedBlob);
|
||||
setBlobUrl(url);
|
||||
setLoading(false);
|
||||
} else if (kind === 'docx') {
|
||||
// Convert DOCX blob to HTML via mammoth (lazy loaded)
|
||||
import('mammoth').then(async (mammoth) => {
|
||||
if (cancelled) return;
|
||||
try {
|
||||
const arrayBuffer = await file.convertedBlob!.arrayBuffer();
|
||||
const result = await mammoth.convertToHtml({ arrayBuffer });
|
||||
if (!cancelled) {
|
||||
setRenderedContent({ kind: 'html', html: result.value });
|
||||
setLoading(false);
|
||||
}
|
||||
} catch {
|
||||
if (!cancelled) {
|
||||
setRenderedContent({ kind: 'plaintext', text: '[Failed to parse DOCX for preview]' });
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
});
|
||||
} else if (kind === 'spreadsheet') {
|
||||
// Parse xlsx/xls/ods via SheetJS and render as table
|
||||
import('xlsx').then(async (XLSX) => {
|
||||
if (cancelled) return;
|
||||
try {
|
||||
const buffer = await file.convertedBlob!.arrayBuffer();
|
||||
const wb = XLSX.read(buffer, { type: 'array' });
|
||||
const ws = wb.Sheets[wb.SheetNames[0]];
|
||||
const jsonData = XLSX.utils.sheet_to_json<string[]>(ws, { header: 1 });
|
||||
const headers = (jsonData[0] || []).map(String);
|
||||
const rows = jsonData.slice(1).map(row => row.map(String));
|
||||
if (!cancelled) {
|
||||
setRenderedContent({ kind: 'spreadsheet', sheetData: { headers, rows } });
|
||||
setLoading(false);
|
||||
}
|
||||
} catch {
|
||||
if (!cancelled) {
|
||||
setRenderedContent({ kind: 'plaintext', text: '[Failed to parse spreadsheet for preview]' });
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
});
|
||||
} else if (kind === 'pptx') {
|
||||
// Extract slide text from PPTX via jszip
|
||||
import('jszip').then(async (JSZipModule) => {
|
||||
if (cancelled) return;
|
||||
try {
|
||||
const JSZip = JSZipModule.default;
|
||||
const buffer = await file.convertedBlob!.arrayBuffer();
|
||||
const zip = await JSZip.loadAsync(buffer);
|
||||
|
||||
const slideFiles = Object.keys(zip.files)
|
||||
.filter(f => /^ppt\/slides\/slide\d+\.xml$/i.test(f))
|
||||
.sort((a, b) => {
|
||||
const numA = parseInt(a.match(/slide(\d+)/)?.[1] || '0');
|
||||
const numB = parseInt(b.match(/slide(\d+)/)?.[1] || '0');
|
||||
return numA - numB;
|
||||
});
|
||||
|
||||
const slidesHtml: string[] = [];
|
||||
for (const slideFile of slideFiles) {
|
||||
const content = await zip.file(slideFile)?.async('string');
|
||||
if (!content) continue;
|
||||
const texts: string[] = [];
|
||||
const textRegex = /<a:t>([^<]*)<\/a:t>/g;
|
||||
let match;
|
||||
while ((match = textRegex.exec(content)) !== null) {
|
||||
if (match[1].trim()) texts.push(escapeHtml(match[1]));
|
||||
}
|
||||
const slideNum = slideFile.match(/slide(\d+)/)?.[1] || '?';
|
||||
slidesHtml.push(`<div style="margin:1.5em 0;padding:1em 1.5em;border:1px solid rgba(180,140,100,0.15);border-radius:12px;background:white"><h3 style="margin:0 0 0.5em;color:#9333ea;font-size:14px;font-family:monospace">Slide ${slideNum}</h3><p style="margin:0;line-height:1.6;color:#2d1f14">${texts.join('<br/>')}</p></div>`);
|
||||
}
|
||||
|
||||
if (!cancelled) {
|
||||
const html = slidesHtml.length > 0
|
||||
? `<h2 style="font-family:Georgia,serif;color:#2d1f14;margin-bottom:0.5em">${escapeHtml(file.convertedName || file.name)}</h2><p style="color:#7a6552;font-size:13px;font-family:monospace">${slideFiles.length} slides</p>${slidesHtml.join('')}`
|
||||
: '<p style="color:#7a6552">No slide content found in this presentation.</p>';
|
||||
setRenderedContent({ kind: 'html', html });
|
||||
setLoading(false);
|
||||
}
|
||||
} catch {
|
||||
if (!cancelled) {
|
||||
setRenderedContent({ kind: 'plaintext', text: '[Failed to parse PPTX for preview]' });
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
});
|
||||
} else if (kind === 'epub') {
|
||||
// Extract EPUB content via jszip
|
||||
import('jszip').then(async (JSZipModule) => {
|
||||
if (cancelled) return;
|
||||
try {
|
||||
const JSZip = JSZipModule.default;
|
||||
const buffer = await file.convertedBlob!.arrayBuffer();
|
||||
const zip = await JSZip.loadAsync(buffer);
|
||||
|
||||
// Find XHTML content files in the EPUB
|
||||
const htmlFiles = Object.keys(zip.files)
|
||||
.filter(f => /\.(xhtml|html|htm)$/i.test(f) && !f.includes('nav') && !f.includes('toc'))
|
||||
.sort();
|
||||
|
||||
const contentParts: string[] = [];
|
||||
for (const htmlFile of htmlFiles.slice(0, 20)) { // Limit to 20 chapters
|
||||
const content = await zip.file(htmlFile)?.async('string');
|
||||
if (!content) continue;
|
||||
// Extract body content
|
||||
const bodyMatch = content.match(/<body[^>]*>([\s\S]*?)<\/body>/i);
|
||||
if (bodyMatch) {
|
||||
contentParts.push(bodyMatch[1]);
|
||||
}
|
||||
}
|
||||
|
||||
if (!cancelled) {
|
||||
const html = contentParts.length > 0
|
||||
? contentParts.join('<hr style="border:none;height:1px;background:rgba(180,140,100,0.15);margin:2em 0"/>')
|
||||
: '<p style="color:#7a6552">No readable content found in this EPUB.</p>';
|
||||
setRenderedContent({ kind: 'html', html });
|
||||
setLoading(false);
|
||||
}
|
||||
} catch {
|
||||
if (!cancelled) {
|
||||
setRenderedContent({ kind: 'plaintext', text: '[Failed to parse EPUB for preview]' });
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
});
|
||||
} else if (kind === 'font') {
|
||||
// Preview font using opentype.js — render sample glyphs to canvas
|
||||
import('opentype.js').then(async (opentype) => {
|
||||
if (cancelled) return;
|
||||
try {
|
||||
const buffer = await file.convertedBlob!.arrayBuffer();
|
||||
const font = opentype.parse(buffer);
|
||||
|
||||
// Create a canvas with sample text
|
||||
const canvas = document.createElement('canvas');
|
||||
const scale = 2; // Retina
|
||||
canvas.width = 700 * scale;
|
||||
canvas.height = 500 * scale;
|
||||
const ctx = canvas.getContext('2d')!;
|
||||
ctx.scale(scale, scale);
|
||||
|
||||
// Background
|
||||
ctx.fillStyle = '#fffcf8';
|
||||
ctx.fillRect(0, 0, 700, 500);
|
||||
|
||||
// Font name
|
||||
ctx.fillStyle = '#2d1f14';
|
||||
ctx.font = '14px monospace';
|
||||
ctx.fillText(`${font.names.fontFamily?.en || 'Font'} — ${font.names.fontSubfamily?.en || ''}`, 24, 32);
|
||||
|
||||
// Draw sample text at various sizes
|
||||
const samples = [
|
||||
{ text: 'ABCDEFGHIJKLM', size: 48, y: 90 },
|
||||
{ text: 'NOPQRSTUVWXYZ', size: 48, y: 145 },
|
||||
{ text: 'abcdefghijklm', size: 44, y: 200 },
|
||||
{ text: 'nopqrstuvwxyz', size: 44, y: 250 },
|
||||
{ text: '0123456789', size: 44, y: 305 },
|
||||
{ text: 'The quick brown fox jumps', size: 32, y: 360 },
|
||||
{ text: 'over the lazy dog. 0123456789', size: 24, y: 405 },
|
||||
{ text: '!@#$%^&*()_+-=[]{}|;:\'",.<>?', size: 24, y: 445 },
|
||||
];
|
||||
|
||||
for (const sample of samples) {
|
||||
const path = font.getPath(sample.text, 24, sample.y, sample.size);
|
||||
path.fill = '#2d1f14';
|
||||
path.draw(ctx);
|
||||
}
|
||||
|
||||
const dataUrl = canvas.toDataURL('image/png');
|
||||
if (!cancelled) {
|
||||
setRenderedContent({ kind: 'font', fontDataUrl: dataUrl, label: font.names.fontFamily?.en || 'Font' });
|
||||
setLoading(false);
|
||||
}
|
||||
} catch {
|
||||
if (!cancelled) {
|
||||
setRenderedContent({ kind: 'plaintext', text: '[Failed to parse font for preview]' });
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
});
|
||||
} else if (kind === 'markdown') {
|
||||
// Render Markdown → HTML via marked
|
||||
const reader = new FileReader();
|
||||
reader.onload = async (e) => {
|
||||
if (cancelled) return;
|
||||
try {
|
||||
const { marked } = await import('marked');
|
||||
const raw = e.target?.result as string;
|
||||
const html = await marked.parse(raw, { breaks: true, gfm: true });
|
||||
if (!cancelled) {
|
||||
setRenderedContent({ kind: 'html', html });
|
||||
setLoading(false);
|
||||
}
|
||||
} catch {
|
||||
if (!cancelled) {
|
||||
setRenderedContent({ kind: 'plaintext', text: e.target?.result as string });
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
};
|
||||
reader.onerror = () => {
|
||||
if (!cancelled) {
|
||||
setRenderedContent({ kind: 'plaintext', text: '[Failed to read file]' });
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
reader.readAsText(file.convertedBlob);
|
||||
} else if (kind === 'json' || kind === 'xml' || kind === 'yaml' || kind === 'toml' || kind === 'rtf' || kind === 'csv' || kind === 'tsv' || kind === 'plaintext') {
|
||||
// Read as text, then process
|
||||
const reader = new FileReader();
|
||||
reader.onload = (e) => {
|
||||
if (cancelled) return;
|
||||
const raw = e.target?.result as string;
|
||||
|
||||
switch (kind) {
|
||||
case 'json':
|
||||
setRenderedContent({ kind: 'highlighted', html: highlightJson(raw), label: 'JSON' });
|
||||
break;
|
||||
case 'xml':
|
||||
setRenderedContent({ kind: 'highlighted', html: highlightXml(raw), label: 'XML' });
|
||||
break;
|
||||
case 'yaml':
|
||||
setRenderedContent({ kind: 'highlighted', html: highlightYaml(raw), label: 'YAML' });
|
||||
break;
|
||||
case 'toml':
|
||||
setRenderedContent({ kind: 'highlighted', html: highlightToml(raw), label: 'TOML' });
|
||||
break;
|
||||
case 'rtf':
|
||||
setRenderedContent({ kind: 'plaintext', text: stripRtf(raw) });
|
||||
break;
|
||||
case 'csv':
|
||||
setRenderedContent({ kind: 'table', text: raw, delimiter: ',' });
|
||||
break;
|
||||
case 'tsv':
|
||||
setRenderedContent({ kind: 'table', text: raw, delimiter: '\t' });
|
||||
break;
|
||||
default:
|
||||
setRenderedContent({ kind: 'plaintext', text: raw });
|
||||
}
|
||||
setLoading(false);
|
||||
};
|
||||
reader.onerror = () => {
|
||||
if (!cancelled) {
|
||||
setRenderedContent({ kind: 'plaintext', text: '[Failed to read file content]' });
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
reader.readAsText(file.convertedBlob);
|
||||
} else {
|
||||
setLoading(false);
|
||||
}
|
||||
|
||||
return () => {
|
||||
if (blobUrl) {
|
||||
URL.revokeObjectURL(blobUrl);
|
||||
}
|
||||
cancelled = true;
|
||||
};
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [file?.convertedBlob, file?.targetFormat]);
|
||||
|
||||
// Cleanup blob URL on unmount
|
||||
// Clean up blob URL
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
if (blobUrl) URL.revokeObjectURL(blobUrl);
|
||||
@@ -117,6 +692,23 @@ export function PreviewModal({ file, onClose, onDownload }: PreviewModalProps) {
|
||||
|
||||
const convertedSize = file.convertedBlob ? formatFileSize(file.convertedBlob.size) : '';
|
||||
|
||||
/* ─── Format label + color for the badge ─── */
|
||||
const kindBadge: Record<string, { label: string; color: string }> = {
|
||||
markdown: { label: 'Rendered', color: 'purple' },
|
||||
json: { label: 'Highlighted', color: 'orange' },
|
||||
csv: { label: 'Table', color: 'blue' },
|
||||
tsv: { label: 'Table', color: 'blue' },
|
||||
xml: { label: 'Highlighted', color: 'teal' },
|
||||
yaml: { label: 'Highlighted', color: 'teal' },
|
||||
toml: { label: 'Highlighted', color: 'teal' },
|
||||
docx: { label: 'Rendered', color: 'purple' },
|
||||
spreadsheet: { label: 'Table', color: 'mint' },
|
||||
pptx: { label: 'Slides', color: 'orange' },
|
||||
epub: { label: 'Rendered', color: 'purple' },
|
||||
font: { label: 'Glyphs', color: 'teal' },
|
||||
};
|
||||
const badge = kindBadge[previewKind];
|
||||
|
||||
return (
|
||||
<AnimatePresence>
|
||||
{file && (
|
||||
@@ -146,6 +738,19 @@ export function PreviewModal({ file, onClose, onDownload }: PreviewModalProps) {
|
||||
</svg>
|
||||
<span className="font-mono text-[11px] font-bold uppercase tracking-wider">Preview</span>
|
||||
</div>
|
||||
{badge && (
|
||||
<span
|
||||
className={`font-mono text-[10px] font-bold uppercase tracking-widest px-2 py-0.5 rounded border
|
||||
${badge.color === 'purple' ? 'bg-purple/10 text-purple border-purple/15' : ''}
|
||||
${badge.color === 'orange' ? 'bg-orange/10 text-orange border-orange/15' : ''}
|
||||
${badge.color === 'blue' ? 'bg-blue/10 text-blue border-blue/15' : ''}
|
||||
${badge.color === 'teal' ? 'bg-teal/10 text-teal border-teal/15' : ''}
|
||||
${badge.color === 'mint' ? 'bg-mint/10 text-mint border-mint/15' : ''}
|
||||
`}
|
||||
>
|
||||
{badge.label}
|
||||
</span>
|
||||
)}
|
||||
<div className="min-w-0">
|
||||
<p className="text-sm font-semibold text-text-dark truncate" title={file.convertedName || ''}>
|
||||
{file.convertedName || file.name}
|
||||
@@ -189,7 +794,7 @@ export function PreviewModal({ file, onClose, onDownload }: PreviewModalProps) {
|
||||
<span className="font-mono text-xs text-text-light">Loading preview...</span>
|
||||
</div>
|
||||
</div>
|
||||
) : previewType === 'image' && blobUrl ? (
|
||||
) : previewKind === 'image' && blobUrl ? (
|
||||
<div className="flex items-center justify-center p-6 min-h-[300px]">
|
||||
{/* eslint-disable-next-line @next/next/no-img-element */}
|
||||
<img
|
||||
@@ -198,14 +803,14 @@ export function PreviewModal({ file, onClose, onDownload }: PreviewModalProps) {
|
||||
className="max-w-full max-h-[70vh] object-contain rounded-lg shadow-[0_4px_20px_rgba(0,0,0,0.08)]"
|
||||
/>
|
||||
</div>
|
||||
) : previewType === 'iframe' && blobUrl ? (
|
||||
) : previewKind === 'iframe' && blobUrl ? (
|
||||
<iframe
|
||||
src={blobUrl}
|
||||
className="w-full h-[70vh] border-none"
|
||||
title="File preview"
|
||||
sandbox="allow-same-origin"
|
||||
/>
|
||||
) : previewType === 'audio' && blobUrl ? (
|
||||
) : previewKind === 'audio' && blobUrl ? (
|
||||
<div className="flex flex-col items-center justify-center gap-6 p-10 min-h-[250px]">
|
||||
<div className="flex items-center justify-center w-20 h-20 rounded-full bg-purple/10 border border-purple/20">
|
||||
<svg width="36" height="36" viewBox="0 0 24 24" fill="none" stroke="#a78bfa" strokeWidth="1.5">
|
||||
@@ -221,7 +826,7 @@ export function PreviewModal({ file, onClose, onDownload }: PreviewModalProps) {
|
||||
{file.convertedName}
|
||||
</p>
|
||||
</div>
|
||||
) : previewType === 'video' && blobUrl ? (
|
||||
) : previewKind === 'video' && blobUrl ? (
|
||||
<div className="flex items-center justify-center p-4">
|
||||
<video
|
||||
controls
|
||||
@@ -232,12 +837,72 @@ export function PreviewModal({ file, onClose, onDownload }: PreviewModalProps) {
|
||||
Your browser does not support video playback.
|
||||
</video>
|
||||
</div>
|
||||
) : previewType === 'text' && textContent !== null ? (
|
||||
) : renderedContent?.kind === 'html' && renderedContent.html ? (
|
||||
<RenderedHtmlFrame html={renderedContent.html} title={file.convertedName || 'Preview'} />
|
||||
) : renderedContent?.kind === 'highlighted' && renderedContent.html ? (
|
||||
<HighlightedCode html={renderedContent.html} label={renderedContent.label || ''} />
|
||||
) : renderedContent?.kind === 'table' && renderedContent.text ? (
|
||||
<CsvTable text={renderedContent.text} delimiter={renderedContent.delimiter || ','} />
|
||||
) : renderedContent?.kind === 'spreadsheet' && renderedContent.sheetData ? (
|
||||
<div className="p-4">
|
||||
<pre className="w-full p-4 bg-white rounded-xl border border-border-soft font-mono text-[13px] leading-relaxed text-text-dark overflow-auto max-h-[70vh] whitespace-pre-wrap break-words">
|
||||
{textContent.length > 100000
|
||||
? textContent.slice(0, 100000) + '\n\n... [truncated — file too large for preview]'
|
||||
: textContent}
|
||||
<div className="flex items-center gap-2 mb-3">
|
||||
<span className="inline-flex items-center px-2 py-0.5 font-mono text-[10px] font-bold uppercase tracking-widest rounded bg-mint/10 text-mint border border-mint/15">
|
||||
Spreadsheet
|
||||
</span>
|
||||
<span className="font-mono text-[11px] text-text-light">
|
||||
{renderedContent.sheetData.rows.length} rows x {renderedContent.sheetData.headers.length} cols
|
||||
</span>
|
||||
</div>
|
||||
<div className="overflow-auto max-h-[65vh] rounded-xl border border-border-soft">
|
||||
<table className="w-full border-collapse bg-white text-[13px]">
|
||||
<thead>
|
||||
<tr>
|
||||
<th className="px-3 py-2 text-left font-mono text-[11px] font-bold text-text-light bg-bg-warm border-b border-border-soft w-10">#</th>
|
||||
{renderedContent.sheetData.headers.map((h, i) => (
|
||||
<th key={i} className="px-3 py-2 text-left font-mono text-[11px] font-bold text-purple bg-bg-warm border-b border-border-soft whitespace-nowrap">
|
||||
{h}
|
||||
</th>
|
||||
))}
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{renderedContent.sheetData.rows.slice(0, 500).map((row, ri) => (
|
||||
<tr key={ri} className={ri % 2 === 0 ? 'bg-white' : 'bg-bg-cream/40'}>
|
||||
<td className="px-3 py-1.5 font-mono text-[11px] text-text-light border-b border-border-soft/50 w-10">{ri + 1}</td>
|
||||
{renderedContent.sheetData!.headers.map((_, ci) => (
|
||||
<td key={ci} className="px-3 py-1.5 font-mono text-[12px] text-text-dark border-b border-border-soft/50 whitespace-nowrap max-w-[300px] truncate" title={row[ci] || ''}>
|
||||
{row[ci] || ''}
|
||||
</td>
|
||||
))}
|
||||
</tr>
|
||||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
{renderedContent.sheetData.rows.length > 500 && (
|
||||
<div className="text-center py-2 font-mono text-[11px] text-text-light bg-bg-warm border-t border-border-soft">
|
||||
Showing 500 of {renderedContent.sheetData.rows.length} rows
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
) : renderedContent?.kind === 'font' && renderedContent.fontDataUrl ? (
|
||||
<div className="flex flex-col items-center justify-center p-6 min-h-[300px] gap-3">
|
||||
<span className="inline-flex items-center px-2 py-0.5 font-mono text-[10px] font-bold uppercase tracking-widest rounded bg-teal/10 text-teal border border-teal/15">
|
||||
{renderedContent.label}
|
||||
</span>
|
||||
{/* eslint-disable-next-line @next/next/no-img-element */}
|
||||
<img
|
||||
src={renderedContent.fontDataUrl}
|
||||
alt="Font preview"
|
||||
className="max-w-full max-h-[70vh] object-contain rounded-lg shadow-[0_4px_20px_rgba(0,0,0,0.08)]"
|
||||
/>
|
||||
</div>
|
||||
) : renderedContent?.kind === 'plaintext' && renderedContent.text !== undefined ? (
|
||||
<div className="p-4">
|
||||
<pre className="w-full p-5 bg-white rounded-xl border border-border-soft font-mono text-[13px] leading-relaxed text-text-dark overflow-auto max-h-[65vh] whitespace-pre-wrap break-words">
|
||||
{renderedContent.text.length > 100000
|
||||
? renderedContent.text.slice(0, 100000) + '\n\n... [truncated — file too large for preview]'
|
||||
: renderedContent.text}
|
||||
</pre>
|
||||
</div>
|
||||
) : (
|
||||
|
||||
@@ -6,6 +6,11 @@ import { convertImage } from '@/lib/converters/imageConverter';
|
||||
import { convertData } from '@/lib/converters/dataConverter';
|
||||
import { convertDocument } from '@/lib/converters/documentConverter';
|
||||
import { convertMedia } from '@/lib/converters/mediaConverter';
|
||||
import { convertSpreadsheet, isSpreadsheetConversion } from '@/lib/converters/spreadsheetConverter';
|
||||
import { convertEbook, isEbookConversion } from '@/lib/converters/ebookConverter';
|
||||
import { convertPresentation, isPresentationConversion } from '@/lib/converters/presentationConverter';
|
||||
import { convertFont, isFontConversion } from '@/lib/converters/fontConverter';
|
||||
import { getExtension } from '@/lib/fileDetector';
|
||||
|
||||
export function useConversion(
|
||||
updateFile: (id: string, updates: Partial<UploadedFile>) => void
|
||||
@@ -24,6 +29,18 @@ export function useConversion(
|
||||
};
|
||||
|
||||
let result;
|
||||
const ext = getExtension(file.file.name);
|
||||
|
||||
// Route spreadsheet conversions (xlsx/xls/ods sources or targets)
|
||||
if (file.category === 'data' && isSpreadsheetConversion(ext, file.targetFormat)) {
|
||||
result = await convertSpreadsheet(file.file, file.targetFormat, onProgress);
|
||||
} else if (file.category === 'data' && isFontConversion(ext, file.targetFormat)) {
|
||||
result = await convertFont(file.file, file.targetFormat, onProgress);
|
||||
} else if (file.category === 'document' && isPresentationConversion(ext, file.targetFormat)) {
|
||||
result = await convertPresentation(file.file, file.targetFormat, onProgress);
|
||||
} else if (file.category === 'document' && isEbookConversion(ext, file.targetFormat)) {
|
||||
result = await convertEbook(file.file, file.targetFormat, onProgress);
|
||||
} else {
|
||||
switch (file.category) {
|
||||
case 'image':
|
||||
result = await convertImage(file.file, file.targetFormat, onProgress);
|
||||
@@ -41,6 +58,7 @@ export function useConversion(
|
||||
default:
|
||||
throw new Error(`Unsupported file category: ${file.category}`);
|
||||
}
|
||||
}
|
||||
|
||||
updateFile(file.id, {
|
||||
status: 'done',
|
||||
|
||||
+43
-14
@@ -12,16 +12,21 @@ const IMAGE_CONVERSIONS: Record<string, string[]> = {
|
||||
avif: ['png', 'jpg', 'webp', 'gif', 'bmp', 'tiff'],
|
||||
svg: ['png', 'jpg', 'webp', 'gif', 'bmp', 'avif', 'tiff'],
|
||||
ico: ['png', 'jpg', 'webp', 'gif', 'bmp'],
|
||||
heic: ['png', 'jpg', 'webp', 'gif', 'bmp', 'avif', 'tiff'],
|
||||
heif: ['png', 'jpg', 'webp', 'gif', 'bmp', 'avif', 'tiff'],
|
||||
psd: ['png', 'jpg', 'webp', 'gif', 'bmp', 'avif', 'tiff', 'ico'],
|
||||
};
|
||||
|
||||
const DOCUMENT_CONVERSIONS: Record<string, string[]> = {
|
||||
pdf: ['txt', 'html', 'md', 'docx'],
|
||||
docx: ['pdf', 'html', 'txt', 'md'],
|
||||
md: ['html', 'pdf', 'txt', 'docx'],
|
||||
html: ['pdf', 'txt', 'md', 'docx'],
|
||||
htm: ['pdf', 'txt', 'md', 'docx'],
|
||||
txt: ['pdf', 'html', 'md', 'docx'],
|
||||
pdf: ['txt', 'html', 'md', 'docx', 'epub'],
|
||||
docx: ['pdf', 'html', 'txt', 'md', 'epub'],
|
||||
md: ['html', 'pdf', 'txt', 'docx', 'epub', 'pptx'],
|
||||
html: ['pdf', 'txt', 'md', 'docx', 'epub', 'pptx'],
|
||||
htm: ['pdf', 'txt', 'md', 'docx', 'epub', 'pptx'],
|
||||
txt: ['pdf', 'html', 'md', 'docx', 'epub', 'pptx'],
|
||||
rtf: ['txt', 'html', 'md', 'pdf', 'docx'],
|
||||
epub: ['txt', 'html', 'md', 'pdf'],
|
||||
pptx: ['txt', 'html', 'pdf', 'md'],
|
||||
};
|
||||
|
||||
const AUDIO_CONVERSIONS: Record<string, string[]> = {
|
||||
@@ -47,13 +52,31 @@ const VIDEO_CONVERSIONS: Record<string, string[]> = {
|
||||
};
|
||||
|
||||
const DATA_CONVERSIONS: Record<string, string[]> = {
|
||||
csv: ['json', 'xml', 'yaml', 'tsv', 'toml'],
|
||||
json: ['csv', 'xml', 'yaml', 'tsv', 'toml'],
|
||||
xml: ['json', 'csv', 'yaml', 'tsv', 'toml'],
|
||||
yaml: ['json', 'csv', 'xml', 'tsv', 'toml'],
|
||||
yml: ['json', 'csv', 'xml', 'tsv', 'toml'],
|
||||
tsv: ['csv', 'json', 'xml', 'yaml', 'toml'],
|
||||
toml: ['json', 'csv', 'xml', 'yaml', 'tsv'],
|
||||
csv: ['json', 'xml', 'yaml', 'tsv', 'toml', 'xlsx', 'ini', 'env', 'properties', 'ndjson', 'sql'],
|
||||
json: ['csv', 'xml', 'yaml', 'tsv', 'toml', 'xlsx', 'ini', 'env', 'properties', 'ndjson', 'sql'],
|
||||
xml: ['json', 'csv', 'yaml', 'tsv', 'toml', 'xlsx'],
|
||||
yaml: ['json', 'csv', 'xml', 'tsv', 'toml', 'xlsx', 'ini', 'env', 'properties', 'ndjson', 'sql'],
|
||||
yml: ['json', 'csv', 'xml', 'tsv', 'toml', 'xlsx', 'ini', 'env', 'properties', 'ndjson', 'sql'],
|
||||
tsv: ['csv', 'json', 'xml', 'yaml', 'toml', 'xlsx', 'ndjson', 'sql'],
|
||||
toml: ['json', 'csv', 'xml', 'yaml', 'tsv', 'xlsx'],
|
||||
// Key-value formats
|
||||
ini: ['json', 'yaml', 'toml', 'env', 'properties', 'xml', 'csv'],
|
||||
env: ['json', 'yaml', 'toml', 'ini', 'properties', 'csv'],
|
||||
properties: ['json', 'yaml', 'toml', 'ini', 'env', 'csv'],
|
||||
// Line-delimited JSON
|
||||
ndjson: ['json', 'csv', 'tsv', 'yaml', 'xml', 'xlsx', 'sql'],
|
||||
jsonl: ['json', 'csv', 'tsv', 'yaml', 'xml', 'xlsx', 'sql'],
|
||||
// SQL
|
||||
sql: ['json', 'csv', 'tsv', 'yaml', 'xlsx'],
|
||||
// Spreadsheets
|
||||
xlsx: ['csv', 'json', 'tsv', 'xml', 'yaml', 'toml', 'ods', 'html', 'txt', 'ndjson', 'sql'],
|
||||
xls: ['xlsx', 'csv', 'json', 'tsv', 'xml', 'yaml', 'toml', 'ods', 'html', 'txt', 'ndjson', 'sql'],
|
||||
ods: ['xlsx', 'csv', 'json', 'tsv', 'xml', 'yaml', 'toml', 'html', 'txt', 'ndjson', 'sql'],
|
||||
// Fonts
|
||||
ttf: ['otf', 'woff', 'woff2'],
|
||||
otf: ['ttf', 'woff', 'woff2'],
|
||||
woff: ['ttf', 'otf', 'woff2'],
|
||||
woff2: ['ttf', 'otf', 'woff'],
|
||||
};
|
||||
|
||||
const ALL_CONVERSIONS: Record<FileCategory, Record<string, string[]>> = {
|
||||
@@ -77,15 +100,21 @@ export function getDefaultTarget(category: FileCategory, extension: string): str
|
||||
// Images → WebP (modern, smaller)
|
||||
png: 'webp', jpg: 'webp', jpeg: 'webp', gif: 'webp',
|
||||
bmp: 'png', tiff: 'png', tif: 'png', avif: 'png', svg: 'png', ico: 'png',
|
||||
heic: 'jpg', heif: 'jpg', psd: 'png',
|
||||
// Documents → PDF (except PDF → DOCX)
|
||||
docx: 'pdf', md: 'html', html: 'pdf', htm: 'pdf', txt: 'pdf',
|
||||
pdf: 'docx', rtf: 'docx',
|
||||
pdf: 'docx', rtf: 'docx', epub: 'html', pptx: 'pdf',
|
||||
// Audio → MP3
|
||||
wav: 'mp3', flac: 'mp3', ogg: 'mp3', aac: 'mp3', m4a: 'mp3', wma: 'mp3', opus: 'mp3', mp3: 'wav',
|
||||
// Video → MP4
|
||||
avi: 'mp4', mov: 'mp4', mkv: 'mp4', flv: 'mp4', wmv: 'mp4', m4v: 'mp4', mp4: 'webm', webm: 'mp4',
|
||||
// Data → JSON
|
||||
csv: 'json', xml: 'json', yaml: 'json', yml: 'json', tsv: 'csv', json: 'csv', toml: 'json',
|
||||
ini: 'json', env: 'json', properties: 'json', ndjson: 'json', jsonl: 'json', sql: 'json',
|
||||
// Spreadsheets → CSV
|
||||
xlsx: 'csv', xls: 'csv', ods: 'csv',
|
||||
// Fonts → WOFF2 (modern web standard)
|
||||
ttf: 'woff2', otf: 'woff2', woff: 'woff2', woff2: 'ttf',
|
||||
};
|
||||
|
||||
return defaults[extension] || formats[0];
|
||||
|
||||
@@ -14,6 +14,8 @@ async function readFileAsText(file: File): Promise<string> {
|
||||
});
|
||||
}
|
||||
|
||||
/* ── CSV / TSV ── */
|
||||
|
||||
function csvToJson(text: string): object[] {
|
||||
const result = Papa.parse(text, { header: true, skipEmptyLines: true });
|
||||
return result.data as object[];
|
||||
@@ -34,6 +36,8 @@ function jsonToTsv(data: unknown): string {
|
||||
return Papa.unparse(arr, { delimiter: '\t' });
|
||||
}
|
||||
|
||||
/* ── XML ── */
|
||||
|
||||
function xmlToJson(text: string): unknown {
|
||||
const parser = new XMLParser({ ignoreAttributes: false });
|
||||
return parser.parse(text);
|
||||
@@ -44,6 +48,8 @@ function jsonToXml(data: unknown): string {
|
||||
return builder.build(typeof data === 'string' ? JSON.parse(data) : data);
|
||||
}
|
||||
|
||||
/* ── YAML ── */
|
||||
|
||||
function jsonToYaml(data: unknown): string {
|
||||
return yaml.dump(typeof data === 'string' ? JSON.parse(data) : data);
|
||||
}
|
||||
@@ -52,6 +58,8 @@ function yamlToJson(text: string): unknown {
|
||||
return yaml.load(text);
|
||||
}
|
||||
|
||||
/* ── TOML ── */
|
||||
|
||||
async function tomlToJson(text: string): Promise<unknown> {
|
||||
const TOML = await import('smol-toml');
|
||||
return TOML.parse(text);
|
||||
@@ -63,6 +71,211 @@ async function jsonToToml(data: unknown): Promise<string> {
|
||||
return TOML.stringify(obj as Record<string, unknown>);
|
||||
}
|
||||
|
||||
/* ── INI ── */
|
||||
|
||||
function iniToJson(text: string): Record<string, unknown> {
|
||||
const result: Record<string, unknown> = {};
|
||||
let currentSection = '';
|
||||
|
||||
for (const line of text.split(/\r?\n/)) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed || trimmed.startsWith(';') || trimmed.startsWith('#')) continue;
|
||||
|
||||
const sectionMatch = trimmed.match(/^\[(.+)\]$/);
|
||||
if (sectionMatch) {
|
||||
currentSection = sectionMatch[1];
|
||||
if (!result[currentSection]) result[currentSection] = {};
|
||||
continue;
|
||||
}
|
||||
|
||||
const kvMatch = trimmed.match(/^([^=]+)=(.*)$/);
|
||||
if (kvMatch) {
|
||||
const key = kvMatch[1].trim();
|
||||
let value: unknown = kvMatch[2].trim();
|
||||
// Auto-type: numbers, booleans
|
||||
if (value === 'true') value = true;
|
||||
else if (value === 'false') value = false;
|
||||
else if (/^-?\d+\.?\d*$/.test(value as string)) value = Number(value);
|
||||
|
||||
if (currentSection) {
|
||||
(result[currentSection] as Record<string, unknown>)[key] = value;
|
||||
} else {
|
||||
result[key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
function jsonToIni(data: unknown): string {
|
||||
const obj = typeof data === 'string' ? JSON.parse(data) : data;
|
||||
if (typeof obj !== 'object' || obj === null) return String(obj);
|
||||
|
||||
const lines: string[] = [];
|
||||
const topLevel: string[] = [];
|
||||
const sections: string[] = [];
|
||||
|
||||
for (const [key, value] of Object.entries(obj as Record<string, unknown>)) {
|
||||
if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
|
||||
sections.push(`[${key}]`);
|
||||
for (const [k, v] of Object.entries(value as Record<string, unknown>)) {
|
||||
sections.push(`${k}=${v}`);
|
||||
}
|
||||
sections.push('');
|
||||
} else {
|
||||
topLevel.push(`${key}=${value}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (topLevel.length) lines.push(...topLevel, '');
|
||||
lines.push(...sections);
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
/* ── ENV ── */
|
||||
|
||||
function envToJson(text: string): Record<string, string> {
|
||||
const result: Record<string, string> = {};
|
||||
for (const line of text.split(/\r?\n/)) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed || trimmed.startsWith('#')) continue;
|
||||
const eqIdx = trimmed.indexOf('=');
|
||||
if (eqIdx === -1) continue;
|
||||
const key = trimmed.substring(0, eqIdx).trim();
|
||||
let value = trimmed.substring(eqIdx + 1).trim();
|
||||
// Strip surrounding quotes
|
||||
if ((value.startsWith('"') && value.endsWith('"')) ||
|
||||
(value.startsWith("'") && value.endsWith("'"))) {
|
||||
value = value.slice(1, -1);
|
||||
}
|
||||
result[key] = value;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
function jsonToEnv(data: unknown): string {
|
||||
const obj = typeof data === 'string' ? JSON.parse(data) : data;
|
||||
if (typeof obj !== 'object' || obj === null) return '';
|
||||
const flat = flattenForEnv(obj as Record<string, unknown>);
|
||||
return Object.entries(flat).map(([k, v]) => `${k}=${v}`).join('\n');
|
||||
}
|
||||
|
||||
function flattenForEnv(obj: Record<string, unknown>, prefix = ''): Record<string, string> {
|
||||
const result: Record<string, string> = {};
|
||||
for (const [key, value] of Object.entries(obj)) {
|
||||
const envKey = prefix ? `${prefix}_${key}` : key;
|
||||
if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
|
||||
Object.assign(result, flattenForEnv(value as Record<string, unknown>, envKey));
|
||||
} else {
|
||||
result[envKey.toUpperCase()] = String(value);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ── Properties (Java .properties) ── */
|
||||
|
||||
function propertiesToJson(text: string): Record<string, string> {
|
||||
const result: Record<string, string> = {};
|
||||
for (const line of text.split(/\r?\n/)) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed || trimmed.startsWith('#') || trimmed.startsWith('!')) continue;
|
||||
// Split on first = or :
|
||||
const match = trimmed.match(/^([^=:]+)[=:](.*)$/);
|
||||
if (match) {
|
||||
result[match[1].trim()] = match[2].trim();
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
function jsonToProperties(data: unknown): string {
|
||||
const obj = typeof data === 'string' ? JSON.parse(data) : data;
|
||||
if (typeof obj !== 'object' || obj === null) return '';
|
||||
const flat = flattenForEnv(obj as Record<string, unknown>);
|
||||
return Object.entries(flat).map(([k, v]) => `${k}=${v}`).join('\n');
|
||||
}
|
||||
|
||||
/* ── NDJSON / JSONL ── */
|
||||
|
||||
function ndjsonToJson(text: string): unknown[] {
|
||||
return text
|
||||
.split(/\r?\n/)
|
||||
.filter(line => line.trim())
|
||||
.map(line => JSON.parse(line));
|
||||
}
|
||||
|
||||
function jsonToNdjson(data: unknown): string {
|
||||
const arr = Array.isArray(data) ? data : [data];
|
||||
return arr.map(item => JSON.stringify(item)).join('\n');
|
||||
}
|
||||
|
||||
/* ── SQL (write only — generates INSERT statements) ── */
|
||||
|
||||
function jsonToSql(data: unknown): string {
|
||||
const arr = Array.isArray(data) ? data : [data];
|
||||
if (arr.length === 0) return '-- No data';
|
||||
|
||||
const first = arr[0] as Record<string, unknown>;
|
||||
if (typeof first !== 'object' || first === null) {
|
||||
return `-- Data:\n-- ${JSON.stringify(data)}`;
|
||||
}
|
||||
|
||||
const columns = Object.keys(first);
|
||||
const tableName = 'data';
|
||||
const lines: string[] = [
|
||||
`-- Generated by Transmute`,
|
||||
`-- ${arr.length} rows`,
|
||||
'',
|
||||
`CREATE TABLE IF NOT EXISTS "${tableName}" (`,
|
||||
columns.map((col, i) => ` "${col}" TEXT${i < columns.length - 1 ? ',' : ''}`).join('\n'),
|
||||
');',
|
||||
'',
|
||||
];
|
||||
|
||||
for (const row of arr) {
|
||||
const r = row as Record<string, unknown>;
|
||||
const values = columns.map(col => {
|
||||
const v = r[col];
|
||||
if (v === null || v === undefined) return 'NULL';
|
||||
if (typeof v === 'number') return String(v);
|
||||
return `'${String(v).replace(/'/g, "''")}'`;
|
||||
});
|
||||
lines.push(`INSERT INTO "${tableName}" (${columns.map(c => `"${c}"`).join(', ')}) VALUES (${values.join(', ')});`);
|
||||
}
|
||||
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
function sqlToJson(text: string): unknown {
|
||||
// Basic parser: extract INSERT statements
|
||||
const rows: Record<string, string>[] = [];
|
||||
const insertRegex = /INSERT\s+INTO\s+"?(\w+)"?\s*\(([^)]+)\)\s*VALUES\s*\(([^)]+)\);?/gi;
|
||||
let match;
|
||||
|
||||
while ((match = insertRegex.exec(text)) !== null) {
|
||||
const columns = match[2].split(',').map(c => c.trim().replace(/"/g, ''));
|
||||
const valuesRaw = match[3];
|
||||
// Simple value parsing
|
||||
const values = valuesRaw.split(',').map(v => {
|
||||
v = v.trim();
|
||||
if (v === 'NULL') return '';
|
||||
if ((v.startsWith("'") && v.endsWith("'")) || (v.startsWith('"') && v.endsWith('"'))) {
|
||||
return v.slice(1, -1);
|
||||
}
|
||||
return v;
|
||||
});
|
||||
|
||||
const row: Record<string, string> = {};
|
||||
columns.forEach((col, i) => { row[col] = values[i] || ''; });
|
||||
rows.push(row);
|
||||
}
|
||||
|
||||
return rows.length > 0 ? rows : { raw: text, note: 'Could not parse SQL INSERT statements' };
|
||||
}
|
||||
|
||||
/* ── Intermediate conversion pipeline ── */
|
||||
|
||||
async function toIntermediate(file: File, ext: string): Promise<unknown> {
|
||||
const text = await readFileAsText(file);
|
||||
|
||||
@@ -80,6 +293,17 @@ async function toIntermediate(file: File, ext: string): Promise<unknown> {
|
||||
return yamlToJson(text);
|
||||
case 'toml':
|
||||
return tomlToJson(text);
|
||||
case 'ini':
|
||||
return iniToJson(text);
|
||||
case 'env':
|
||||
return envToJson(text);
|
||||
case 'properties':
|
||||
return propertiesToJson(text);
|
||||
case 'ndjson':
|
||||
case 'jsonl':
|
||||
return ndjsonToJson(text);
|
||||
case 'sql':
|
||||
return sqlToJson(text);
|
||||
default:
|
||||
throw new Error(`Unsupported source format: ${ext}`);
|
||||
}
|
||||
@@ -100,6 +324,17 @@ async function fromIntermediate(data: unknown, targetFormat: string): Promise<st
|
||||
return jsonToYaml(data);
|
||||
case 'toml':
|
||||
return jsonToToml(data);
|
||||
case 'ini':
|
||||
return jsonToIni(data);
|
||||
case 'env':
|
||||
return jsonToEnv(data);
|
||||
case 'properties':
|
||||
return jsonToProperties(data);
|
||||
case 'ndjson':
|
||||
case 'jsonl':
|
||||
return jsonToNdjson(data);
|
||||
case 'sql':
|
||||
return jsonToSql(data);
|
||||
default:
|
||||
throw new Error(`Unsupported target format: ${targetFormat}`);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,352 @@
|
||||
import { ConversionResult } from '@/types';
|
||||
import { buildOutputFilename, getMimeType } from '@/lib/utils';
|
||||
import { getExtension } from '@/lib/fileDetector';
|
||||
|
||||
/**
|
||||
* EPUB converter.
|
||||
* EPUB is a ZIP file containing XHTML content + OPF metadata.
|
||||
* We use JSZip (already installed) to read/write EPUB archives.
|
||||
*
|
||||
* Supported routes:
|
||||
* epub → txt, html, md, pdf
|
||||
* txt, html, md → epub
|
||||
*/
|
||||
|
||||
const EPUB_EXTENSIONS = new Set(['epub']);
|
||||
|
||||
export function isEbookConversion(sourceExt: string, targetFormat: string): boolean {
|
||||
return EPUB_EXTENSIONS.has(sourceExt) || EPUB_EXTENSIONS.has(targetFormat);
|
||||
}
|
||||
|
||||
async function readFileAsText(file: File): Promise<string> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const reader = new FileReader();
|
||||
reader.onload = (e) => resolve(e.target?.result as string);
|
||||
reader.onerror = () => reject(new Error('Failed to read file'));
|
||||
reader.readAsText(file);
|
||||
});
|
||||
}
|
||||
|
||||
async function readFileAsArrayBuffer(file: File): Promise<ArrayBuffer> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const reader = new FileReader();
|
||||
reader.onload = (e) => resolve(e.target?.result as ArrayBuffer);
|
||||
reader.onerror = () => reject(new Error('Failed to read file'));
|
||||
reader.readAsArrayBuffer(file);
|
||||
});
|
||||
}
|
||||
|
||||
function escapeHtml(text: string): string {
|
||||
return text
|
||||
.replace(/&/g, '&')
|
||||
.replace(/</g, '<')
|
||||
.replace(/>/g, '>')
|
||||
.replace(/"/g, '"');
|
||||
}
|
||||
|
||||
function stripHtmlTags(html: string): string {
|
||||
return html
|
||||
.replace(/<style[^>]*>[\s\S]*?<\/style>/gi, '')
|
||||
.replace(/<script[^>]*>[\s\S]*?<\/script>/gi, '')
|
||||
.replace(/<[^>]+>/g, ' ')
|
||||
.replace(/\s+/g, ' ')
|
||||
.replace(/ /g, ' ')
|
||||
.replace(/&/g, '&')
|
||||
.replace(/</g, '<')
|
||||
.replace(/>/g, '>')
|
||||
.replace(/"/g, '"')
|
||||
.replace(/'/g, "'")
|
||||
.trim();
|
||||
}
|
||||
|
||||
function htmlToMarkdown(html: string): string {
|
||||
return html
|
||||
.replace(/<h1[^>]*>(.*?)<\/h1>/gi, '# $1\n\n')
|
||||
.replace(/<h2[^>]*>(.*?)<\/h2>/gi, '## $1\n\n')
|
||||
.replace(/<h3[^>]*>(.*?)<\/h3>/gi, '### $1\n\n')
|
||||
.replace(/<h4[^>]*>(.*?)<\/h4>/gi, '#### $1\n\n')
|
||||
.replace(/<h5[^>]*>(.*?)<\/h5>/gi, '##### $1\n\n')
|
||||
.replace(/<h6[^>]*>(.*?)<\/h6>/gi, '###### $1\n\n')
|
||||
.replace(/<strong[^>]*>(.*?)<\/strong>/gi, '**$1**')
|
||||
.replace(/<b[^>]*>(.*?)<\/b>/gi, '**$1**')
|
||||
.replace(/<em[^>]*>(.*?)<\/em>/gi, '*$1*')
|
||||
.replace(/<i[^>]*>(.*?)<\/i>/gi, '*$1*')
|
||||
.replace(/<a[^>]*href="([^"]*)"[^>]*>(.*?)<\/a>/gi, '[$2]($1)')
|
||||
.replace(/<br\s*\/?>/gi, '\n')
|
||||
.replace(/<p[^>]*>(.*?)<\/p>/gi, '$1\n\n')
|
||||
.replace(/<li[^>]*>(.*?)<\/li>/gi, '- $1\n')
|
||||
.replace(/<[^>]+>/g, '')
|
||||
.replace(/\n{3,}/g, '\n\n')
|
||||
.trim();
|
||||
}
|
||||
|
||||
/* ── Read EPUB ── */
|
||||
|
||||
/**
 * Extract the book title and ordered chapter markup from an EPUB archive.
 *
 * Strategy: read META-INF/container.xml to locate the OPF package document,
 * pull the dc:title, build a manifest id→href map, then walk the spine in
 * order and collect each chapter's <body> markup. Parsing is regex-based
 * (no XML parser), so unusual attribute layouts may be missed — the
 * fallback scan below covers that case.
 *
 * @param buffer Raw EPUB (ZIP) bytes.
 * @returns Title (or 'Untitled') and chapter HTML fragments in spine order.
 */
async function extractEpubContent(buffer: ArrayBuffer): Promise<{ title: string; htmlChapters: string[] }> {
  const JSZip = (await import('jszip')).default;
  const zip = await JSZip.loadAsync(buffer);

  let title = 'Untitled';
  const htmlChapters: string[] = [];

  // Find the OPF file (rootfile)
  let opfPath = '';
  const containerXml = await zip.file('META-INF/container.xml')?.async('string');
  if (containerXml) {
    const match = containerXml.match(/full-path="([^"]+)"/);
    if (match) opfPath = match[1];
  }

  // Parse OPF for spine order
  if (opfPath) {
    const opfContent = await zip.file(opfPath)?.async('string');
    if (opfContent) {
      // Get title
      const titleMatch = opfContent.match(/<dc:title[^>]*>([^<]+)<\/dc:title>/i);
      if (titleMatch) title = titleMatch[1];

      // Get manifest items (id → href). Attribute order matters for the
      // regex, so two passes cover id-before-href and href-before-id.
      const manifest: Record<string, string> = {};
      const itemRegex = /<item[^>]*id="([^"]*)"[^>]*href="([^"]*)"[^>]*media-type="([^"]*)"[^>]*\/?>/gi;
      let itemMatch;
      while ((itemMatch = itemRegex.exec(opfContent)) !== null) {
        manifest[itemMatch[1]] = itemMatch[2];
      }
      // Also handle reversed attribute order
      const itemRegex2 = /<item[^>]*href="([^"]*)"[^>]*id="([^"]*)"[^>]*media-type="([^"]*)"[^>]*\/?>/gi;
      while ((itemMatch = itemRegex2.exec(opfContent)) !== null) {
        manifest[itemMatch[2]] = itemMatch[1];
      }

      // Get spine order (reading order of manifest ids)
      const spineIds: string[] = [];
      const spineRegex = /<itemref[^>]*idref="([^"]*)"[^>]*\/?>/gi;
      let spineMatch;
      while ((spineMatch = spineRegex.exec(opfContent)) !== null) {
        spineIds.push(spineMatch[1]);
      }

      // Resolve paths relative to OPF directory
      const opfDir = opfPath.includes('/') ? opfPath.substring(0, opfPath.lastIndexOf('/') + 1) : '';

      for (const id of spineIds) {
        const href = manifest[id];
        if (!href) continue;
        const fullPath = opfDir + href;
        const content = await zip.file(fullPath)?.async('string');
        if (content) {
          // Extract body content; fall back to the whole document
          const bodyMatch = content.match(/<body[^>]*>([\s\S]*)<\/body>/i);
          htmlChapters.push(bodyMatch ? bodyMatch[1] : content);
        }
      }
    }
  }

  // Fallback: if no chapters found (missing/odd OPF), scan for any
  // html/xhtml files in alphabetical order.
  if (htmlChapters.length === 0) {
    const htmlFiles = Object.keys(zip.files)
      .filter(f => /\.(x?html?)$/i.test(f))
      .sort();
    for (const f of htmlFiles) {
      const content = await zip.file(f)?.async('string');
      if (content) {
        const bodyMatch = content.match(/<body[^>]*>([\s\S]*)<\/body>/i);
        htmlChapters.push(bodyMatch ? bodyMatch[1] : content);
      }
    }
  }

  return { title, htmlChapters };
}
|
||||
|
||||
/* ── Write EPUB ── */
|
||||
|
||||
async function createEpubFromHtml(title: string, htmlContent: string): Promise<Blob> {
|
||||
const JSZip = (await import('jszip')).default;
|
||||
const zip = new JSZip();
|
||||
|
||||
const uid = `transmute-${Date.now()}`;
|
||||
|
||||
// mimetype (must be first, uncompressed — JSZip handles this)
|
||||
zip.file('mimetype', 'application/epub+zip');
|
||||
|
||||
// META-INF/container.xml
|
||||
zip.file('META-INF/container.xml', `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<container version="1.0" xmlns="urn:oasis:names:tc:opendocument:xmlns:container">
|
||||
<rootfiles>
|
||||
<rootfile full-path="OEBPS/content.opf" media-type="application/oebps-package+xml"/>
|
||||
</rootfiles>
|
||||
</container>`);
|
||||
|
||||
// OEBPS/content.opf
|
||||
zip.file('OEBPS/content.opf', `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<package xmlns="http://www.idpf.org/2007/opf" unique-identifier="BookId" version="3.0">
|
||||
<metadata xmlns:dc="http://purl.org/dc/elements/1.1/">
|
||||
<dc:identifier id="BookId">${uid}</dc:identifier>
|
||||
<dc:title>${escapeHtml(title)}</dc:title>
|
||||
<dc:language>en</dc:language>
|
||||
<meta property="dcterms:modified">${new Date().toISOString().replace(/\.\d+Z$/, 'Z')}</meta>
|
||||
</metadata>
|
||||
<manifest>
|
||||
<item id="chapter1" href="chapter1.xhtml" media-type="application/xhtml+xml"/>
|
||||
<item id="nav" href="nav.xhtml" media-type="application/xhtml+xml" properties="nav"/>
|
||||
</manifest>
|
||||
<spine>
|
||||
<itemref idref="chapter1"/>
|
||||
</spine>
|
||||
</package>`);
|
||||
|
||||
// Navigation document
|
||||
zip.file('OEBPS/nav.xhtml', `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE html>
|
||||
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:epub="http://www.idpf.org/2007/ops">
|
||||
<head><title>Navigation</title></head>
|
||||
<body>
|
||||
<nav epub:type="toc">
|
||||
<h1>Table of Contents</h1>
|
||||
<ol><li><a href="chapter1.xhtml">${escapeHtml(title)}</a></li></ol>
|
||||
</nav>
|
||||
</body>
|
||||
</html>`);
|
||||
|
||||
// Chapter content
|
||||
zip.file('OEBPS/chapter1.xhtml', `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE html>
|
||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||
<head><title>${escapeHtml(title)}</title>
|
||||
<style>
|
||||
body { font-family: serif; line-height: 1.6; margin: 1em; }
|
||||
h1, h2, h3 { margin-top: 1.5em; }
|
||||
p { margin: 0.5em 0; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
${htmlContent}
|
||||
</body>
|
||||
</html>`);
|
||||
|
||||
const blob = await zip.generateAsync({
|
||||
type: 'blob',
|
||||
mimeType: 'application/epub+zip',
|
||||
compression: 'DEFLATE',
|
||||
});
|
||||
|
||||
return blob;
|
||||
}
|
||||
|
||||
/* ── Main converter ── */
|
||||
|
||||
/**
 * Convert to or from EPUB.
 *
 * epub → txt | html | md | pdf: unpack the archive, join chapters with
 * <hr/> separators, then strip/restyle as needed (pdf goes through jsPDF
 * as plain text — layout and images are not preserved).
 * txt | html | htm | md → epub: wrap the content as a single-chapter EPUB.
 *
 * @param file Input file; direction is decided by its extension.
 * @param targetFormat Target extension (lowercase, no dot).
 * @param onProgress Optional 0–100 progress callback.
 * @throws Error for unsupported source/target pairings.
 */
export async function convertEbook(
  file: File,
  targetFormat: string,
  onProgress?: (progress: number) => void
): Promise<ConversionResult> {
  onProgress?.(10);

  const sourceExt = getExtension(file.name);
  let resultBlob: Blob;

  if (sourceExt === 'epub') {
    // EPUB → other format
    const buffer = await readFileAsArrayBuffer(file);
    onProgress?.(30);

    const { title, htmlChapters } = await extractEpubContent(buffer);
    const fullHtml = htmlChapters.join('\n<hr/>\n');
    onProgress?.(60);

    switch (targetFormat) {
      case 'txt': {
        const text = stripHtmlTags(fullHtml);
        resultBlob = new Blob([text], { type: getMimeType('txt') });
        break;
      }
      case 'html': {
        // Wrap chapters in a simple self-contained reading page.
        const styledHtml = `<!DOCTYPE html>
<html lang="en">
<head><meta charset="utf-8"><title>${escapeHtml(title)}</title>
<style>body{font-family:serif;line-height:1.7;max-width:700px;margin:40px auto;padding:0 20px;color:#1a1a1a}h1,h2,h3{margin-top:1.5em}</style>
</head><body><h1>${escapeHtml(title)}</h1>${fullHtml}</body></html>`;
        resultBlob = new Blob([styledHtml], { type: getMimeType('html') });
        break;
      }
      case 'md': {
        const md = `# ${title}\n\n` + htmlToMarkdown(fullHtml);
        resultBlob = new Blob([md], { type: getMimeType('md') });
        break;
      }
      case 'pdf': {
        // Generate HTML then render to PDF via jspdf (plain text only —
        // formatting and images are dropped).
        const { jsPDF } = await import('jspdf');
        const doc = new jsPDF({ unit: 'mm', format: 'a4' });
        const plainText = stripHtmlTags(fullHtml);
        const lines = doc.splitTextToSize(plainText, 170); // 170mm text width on A4
        const pageHeight = 280; // usable height in mm before page break
        let y = 20;

        // Title
        doc.setFontSize(18);
        doc.text(title, 20, y);
        y += 12;
        doc.setFontSize(11);

        for (const line of lines) {
          if (y > pageHeight) {
            doc.addPage();
            y = 20;
          }
          doc.text(line, 20, y);
          y += 6; // line advance in mm
        }

        resultBlob = doc.output('blob');
        break;
      }
      default:
        throw new Error(`Unsupported: epub → ${targetFormat}`);
    }
  } else {
    // Other format → EPUB
    const text = await readFileAsText(file);
    onProgress?.(30);

    // Book title = filename without extension
    const title = file.name.replace(/\.[^.]+$/, '');
    let htmlContent: string;

    switch (sourceExt) {
      case 'txt': {
        // Blank-line-separated paragraphs become <p> elements.
        htmlContent = text
          .split(/\n\n+/)
          .filter(Boolean)
          .map(p => `<p>${escapeHtml(p)}</p>`)
          .join('\n');
        break;
      }
      case 'html':
      case 'htm': {
        // Extract body if full document
        const bodyMatch = text.match(/<body[^>]*>([\s\S]*)<\/body>/i);
        htmlContent = bodyMatch ? bodyMatch[1] : text;
        break;
      }
      case 'md': {
        const { marked } = await import('marked');
        htmlContent = await marked.parse(text, { breaks: true, gfm: true });
        break;
      }
      default:
        throw new Error(`Unsupported: ${sourceExt} → epub`);
    }

    onProgress?.(60);
    resultBlob = await createEpubFromHtml(title, htmlContent);
  }

  onProgress?.(100);

  return {
    blob: resultBlob,
    filename: buildOutputFilename(file.name, targetFormat),
  };
}
|
||||
@@ -0,0 +1,289 @@
|
||||
import { ConversionResult } from '@/types';
|
||||
import { buildOutputFilename, getMimeType } from '@/lib/utils';
|
||||
import { getExtension } from '@/lib/fileDetector';
|
||||
|
||||
/**
|
||||
* Font converter.
|
||||
* Uses opentype.js for parsing/writing TTF/OTF/WOFF.
|
||||
* Uses woff2-encoder for WOFF2 compress/decompress (browser-compatible WebAssembly).
|
||||
*
|
||||
* Supported routes:
|
||||
* ttf ↔ otf, woff, woff2
|
||||
* otf ↔ ttf, woff, woff2
|
||||
* woff → ttf, otf, woff2
|
||||
* woff2 → ttf, otf, woff
|
||||
*/
|
||||
|
||||
const FONT_EXTENSIONS = new Set(['ttf', 'otf', 'woff', 'woff2']);
|
||||
|
||||
export function isFontConversion(sourceExt: string, targetFormat: string): boolean {
|
||||
return FONT_EXTENSIONS.has(sourceExt) || FONT_EXTENSIONS.has(targetFormat);
|
||||
}
|
||||
|
||||
async function readFileAsArrayBuffer(file: File): Promise<ArrayBuffer> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const reader = new FileReader();
|
||||
reader.onload = (e) => resolve(e.target?.result as ArrayBuffer);
|
||||
reader.onerror = () => reject(new Error('Failed to read file'));
|
||||
reader.readAsArrayBuffer(file);
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Get raw TTF/OTF (SFNT) bytes from any font format.
 * - TTF/OTF: already raw SFNT, returned unchanged
 * - WOFF: parsed by opentype.js, re-exported as ArrayBuffer
 * - WOFF2: decompressed via woff2-encoder (browser-compatible WASM;
 *   the earlier comment referenced wawoff2, which this code no longer uses)
 */
async function getFontAsSfnt(buffer: ArrayBuffer, ext: string): Promise<ArrayBuffer> {
  if (ext === 'woff2') {
    const woff2 = await import('woff2-encoder');
    const decompressed = await woff2.decompress(new Uint8Array(buffer));
    // NOTE(review): if decompress() ever returns a Uint8Array that is a view
    // into a larger buffer, .buffer would over-span the font data — confirm
    // against woff2-encoder's return contract.
    return decompressed.buffer as ArrayBuffer;
  }

  if (ext === 'woff') {
    // opentype.js can parse WOFF and export as raw SFNT
    const opentype = await import('opentype.js');
    const font = opentype.parse(buffer);
    return font.toArrayBuffer();
  }

  // TTF/OTF are already SFNT
  return buffer;
}
|
||||
|
||||
/**
|
||||
* Determine the SFNT flavor (truetype vs cff) by reading the first 4 bytes.
|
||||
* - 0x00010000 or 'true' = TrueType (TTF)
|
||||
* - 'OTTO' = CFF/OpenType (OTF)
|
||||
*/
|
||||
function getSfntFlavor(buffer: ArrayBuffer): 'ttf' | 'otf' {
|
||||
const view = new DataView(buffer);
|
||||
const tag = view.getUint32(0);
|
||||
// OTTO = 0x4F54544F
|
||||
if (tag === 0x4F54544F) return 'otf';
|
||||
return 'ttf';
|
||||
}
|
||||
|
||||
/**
 * Convert an SFNT (TTF/OTF) buffer to WOFF 1.0.
 *
 * WOFF1 wraps each SFNT table in zlib/deflate compression with a 44-byte
 * header and a 20-byte-per-table directory. Compression uses the browser's
 * CompressionStream('deflate') (zlib format, as WOFF requires); where the
 * API is unavailable or compression does not shrink a table, the table is
 * stored raw — compLength === origLength signals "uncompressed" per spec.
 *
 * NOTE(review): totalSfntSize (header offset 16) is written as the input's
 * byte length; the WOFF spec defines it as the size of the *reconstructed*
 * font with 4-byte-padded tables — equal for well-formed padded inputs,
 * but worth confirming for unpadded ones.
 *
 * @param sfntBuffer Raw SFNT font bytes.
 * @returns A newly allocated WOFF1 ArrayBuffer.
 */
async function sfntToWoff(sfntBuffer: ArrayBuffer): Promise<ArrayBuffer> {
  const sfnt = new DataView(sfntBuffer);
  const sfntArray = new Uint8Array(sfntBuffer);

  // Read SFNT header
  const sfntTag = sfnt.getUint32(0); // flavor
  const numTables = sfnt.getUint16(4);

  // Parse table directory (16 bytes per entry, starting at offset 12)
  interface TableEntry {
    tag: number;
    checksum: number;
    offset: number;
    length: number;
  }

  const tables: TableEntry[] = [];
  for (let i = 0; i < numTables; i++) {
    const dirOffset = 12 + i * 16;
    tables.push({
      tag: sfnt.getUint32(dirOffset),
      checksum: sfnt.getUint32(dirOffset + 4),
      offset: sfnt.getUint32(dirOffset + 8),
      length: sfnt.getUint32(dirOffset + 12),
    });
  }

  // Sort tables by tag for WOFF spec compliance
  tables.sort((a, b) => a.tag - b.tag);

  // Compress each table
  interface WoffTableEntry {
    tag: number;
    origLength: number;
    compLength: number;
    origChecksum: number;
    data: Uint8Array;
  }

  const woffTables: WoffTableEntry[] = [];

  for (const table of tables) {
    const origData = sfntArray.slice(table.offset, table.offset + table.length);

    // Try to compress using CompressionStream API
    let compData: Uint8Array;
    try {
      const cs = new CompressionStream('deflate');
      const writer = cs.writable.getWriter();
      const reader = cs.readable.getReader();

      // Drain the readable side concurrently so the write cannot deadlock
      // on internal backpressure.
      const chunks: Uint8Array[] = [];
      const readPromise = (async () => {
        while (true) {
          const { done, value } = await reader.read();
          if (done) break;
          chunks.push(value);
        }
      })();

      writer.write(origData);
      writer.close();
      await readPromise;

      // Concatenate the compressed chunks into one contiguous array.
      const totalLength = chunks.reduce((sum, c) => sum + c.length, 0);
      compData = new Uint8Array(totalLength);
      let offset = 0;
      for (const chunk of chunks) {
        compData.set(chunk, offset);
        offset += chunk.length;
      }
    } catch {
      // If CompressionStream not available, store uncompressed
      compData = origData;
    }

    // Only use compressed if it's actually smaller; equal lengths mean
    // "stored raw" to WOFF readers.
    if (compData.length >= origData.length) {
      compData = origData;
    }

    woffTables.push({
      tag: table.tag,
      origLength: table.length,
      compLength: compData.length,
      origChecksum: table.checksum,
      data: compData,
    });
  }

  // Calculate sizes
  const WOFF_HEADER_SIZE = 44;
  const TABLE_DIR_SIZE = 20 * numTables;
  let dataOffset = WOFF_HEADER_SIZE + TABLE_DIR_SIZE;

  // Align data offset to 4 bytes
  dataOffset = (dataOffset + 3) & ~3;

  let totalSize = dataOffset;
  for (const t of woffTables) {
    totalSize += t.data.length;
    totalSize = (totalSize + 3) & ~3; // 4-byte align
  }

  // ArrayBuffer is zero-initialized, so alignment padding is already 0x00.
  const woffBuffer = new ArrayBuffer(totalSize);
  const woff = new DataView(woffBuffer);
  const woffBytes = new Uint8Array(woffBuffer);

  // WOFF header (all fields big-endian)
  woff.setUint32(0, 0x774F4646); // 'wOFF'
  woff.setUint32(4, sfntTag); // flavor
  woff.setUint32(8, totalSize); // length
  woff.setUint16(12, numTables); // numTables
  woff.setUint16(14, 0); // reserved
  woff.setUint32(16, sfntBuffer.byteLength); // totalSfntSize
  woff.setUint16(20, 1); // majorVersion
  woff.setUint16(22, 0); // minorVersion
  woff.setUint32(24, 0); // metaOffset
  woff.setUint32(28, 0); // metaLength
  woff.setUint32(32, 0); // metaOrigLength
  woff.setUint32(36, 0); // privOffset
  woff.setUint32(40, 0); // privLength

  // Table directory + data
  let currentDataOffset = dataOffset;
  for (let i = 0; i < woffTables.length; i++) {
    const t = woffTables[i];
    const dirEntry = WOFF_HEADER_SIZE + i * 20;

    woff.setUint32(dirEntry, t.tag);
    woff.setUint32(dirEntry + 4, currentDataOffset);
    woff.setUint32(dirEntry + 8, t.compLength);
    woff.setUint32(dirEntry + 12, t.origLength);
    woff.setUint32(dirEntry + 16, t.origChecksum);

    woffBytes.set(t.data, currentDataOffset);
    currentDataOffset += t.data.length;
    currentDataOffset = (currentDataOffset + 3) & ~3; // 4-byte align
  }

  return woffBuffer;
}
|
||||
|
||||
/* ── Main converter ── */
|
||||
|
||||
/**
 * Convert a font file between TTF/OTF/WOFF/WOFF2 containers.
 *
 * Pipeline: normalize the input to raw SFNT bytes (getFontAsSfnt), detect
 * its flavor, then wrap in the requested container. Outline format is NOT
 * converted — CFF outlines stay CFF, TrueType stays TrueType; only the
 * container changes.
 *
 * @param file Input font file; format inferred from its extension.
 * @param targetFormat 'ttf' | 'otf' | 'woff' | 'woff2'.
 * @param onProgress Optional 0–100 progress callback.
 * @throws Error for unsupported target formats.
 */
export async function convertFont(
  file: File,
  targetFormat: string,
  onProgress?: (progress: number) => void
): Promise<ConversionResult> {
  onProgress?.(10);

  const sourceExt = getExtension(file.name);
  const buffer = await readFileAsArrayBuffer(file);
  onProgress?.(20);

  // Step 1: Get raw SFNT (TTF/OTF) bytes
  const sfntBuffer = await getFontAsSfnt(buffer, sourceExt);
  onProgress?.(50);

  const flavor = getSfntFlavor(sfntBuffer);
  let resultBlob: Blob;

  switch (targetFormat) {
    case 'ttf': {
      if (flavor === 'otf') {
        // CFF → TrueType: opentype.js doesn't convert outlines,
        // but the raw SFNT is still valid and most apps handle it.
        // We re-export via opentype.js to ensure valid structure.
        const opentype = await import('opentype.js');
        const font = opentype.parse(sfntBuffer);
        const outBuffer = font.toArrayBuffer();
        resultBlob = new Blob([outBuffer], { type: getMimeType('ttf') });
      } else {
        // Already TrueType-flavored — pass through unchanged.
        resultBlob = new Blob([sfntBuffer], { type: getMimeType('ttf') });
      }
      break;
    }

    case 'otf': {
      // Similar to TTF — we output the SFNT data.
      // True CFF↔TrueType outline conversion is extremely complex.
      // We keep the original outlines but wrap in the requested container.
      const opentype = await import('opentype.js');
      const font = opentype.parse(sfntBuffer);
      const outBuffer = font.toArrayBuffer();
      resultBlob = new Blob([outBuffer], { type: getMimeType('otf') });
      break;
    }

    case 'woff': {
      // Hand-rolled WOFF1 wrapper (see sfntToWoff).
      const woffBuffer = await sfntToWoff(sfntBuffer);
      resultBlob = new Blob([woffBuffer], { type: getMimeType('woff') });
      break;
    }

    case 'woff2': {
      const woff2 = await import('woff2-encoder');
      const compressed = await woff2.compress(new Uint8Array(sfntBuffer));
      // NOTE(review): .buffer may over-span if compress() returns a view into
      // a larger buffer — confirm, or pass the Uint8Array to Blob directly.
      resultBlob = new Blob([compressed.buffer as ArrayBuffer], { type: getMimeType('woff2') });
      break;
    }

    default:
      throw new Error(`Unsupported font conversion: ${sourceExt} → ${targetFormat}`);
  }

  onProgress?.(100);

  return {
    blob: resultBlob,
    filename: buildOutputFilename(file.name, targetFormat),
  };
}
|
||||
@@ -178,6 +178,37 @@ export async function convertImage(
|
||||
): Promise<ConversionResult> {
|
||||
onProgress?.(10);
|
||||
|
||||
// HEIC/HEIF: decode to PNG blob first using heic2any, then continue as normal
|
||||
let inputFile = file;
|
||||
const ext = file.name.split('.').pop()?.toLowerCase() || '';
|
||||
if (ext === 'heic' || ext === 'heif') {
|
||||
const heic2any = (await import('heic2any')).default;
|
||||
const pngBlob = await heic2any({ blob: file, toType: 'image/png' }) as Blob;
|
||||
inputFile = new File([pngBlob], file.name.replace(/\.(heic|heif)$/i, '.png'), { type: 'image/png' });
|
||||
onProgress?.(30);
|
||||
}
|
||||
|
||||
// PSD: decode to PNG blob using ag-psd, then continue as normal
|
||||
if (ext === 'psd') {
|
||||
const { readPsd } = await import('ag-psd');
|
||||
const buffer = await file.arrayBuffer();
|
||||
const psd = readPsd(buffer);
|
||||
onProgress?.(30);
|
||||
|
||||
// ag-psd puts the composite image on psd.canvas when running in browser
|
||||
if (psd.canvas) {
|
||||
const pngBlob = await new Promise<Blob>((resolve, reject) => {
|
||||
psd.canvas!.toBlob((blob) => {
|
||||
if (blob) resolve(blob);
|
||||
else reject(new Error('Failed to render PSD to canvas'));
|
||||
}, 'image/png');
|
||||
});
|
||||
inputFile = new File([pngBlob], file.name.replace(/\.psd$/i, '.png'), { type: 'image/png' });
|
||||
} else {
|
||||
throw new Error('Failed to decode PSD file — no composite image found');
|
||||
}
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const img = new Image();
|
||||
img.crossOrigin = 'anonymous';
|
||||
@@ -270,6 +301,6 @@ export async function convertImage(
|
||||
img.src = e.target?.result as string;
|
||||
};
|
||||
reader.onerror = () => reject(new Error('Failed to read file'));
|
||||
reader.readAsDataURL(file);
|
||||
reader.readAsDataURL(inputFile);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -0,0 +1,315 @@
|
||||
import { ConversionResult } from '@/types';
|
||||
import { buildOutputFilename, getMimeType } from '@/lib/utils';
|
||||
import { getExtension } from '@/lib/fileDetector';
|
||||
|
||||
/**
|
||||
* Presentation converter.
|
||||
* Uses pptxgenjs for writing .pptx files.
|
||||
* Uses jszip for reading .pptx files (extract text from XML).
|
||||
*
|
||||
* Supported routes:
|
||||
* txt, md, html → pptx (generate slides)
|
||||
* pptx → txt, html, pdf
|
||||
*/
|
||||
|
||||
const PPTX_EXTENSIONS = new Set(['pptx']);
|
||||
|
||||
export function isPresentationConversion(sourceExt: string, targetFormat: string): boolean {
|
||||
return PPTX_EXTENSIONS.has(sourceExt) || PPTX_EXTENSIONS.has(targetFormat);
|
||||
}
|
||||
|
||||
async function readFileAsText(file: File): Promise<string> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const reader = new FileReader();
|
||||
reader.onload = (e) => resolve(e.target?.result as string);
|
||||
reader.onerror = () => reject(new Error('Failed to read file'));
|
||||
reader.readAsText(file);
|
||||
});
|
||||
}
|
||||
|
||||
async function readFileAsArrayBuffer(file: File): Promise<ArrayBuffer> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const reader = new FileReader();
|
||||
reader.onload = (e) => resolve(e.target?.result as ArrayBuffer);
|
||||
reader.onerror = () => reject(new Error('Failed to read file'));
|
||||
reader.readAsArrayBuffer(file);
|
||||
});
|
||||
}
|
||||
|
||||
function escapeHtml(text: string): string {
|
||||
return text
|
||||
.replace(/&/g, '&')
|
||||
.replace(/</g, '<')
|
||||
.replace(/>/g, '>')
|
||||
.replace(/"/g, '"');
|
||||
}
|
||||
|
||||
function stripHtmlTags(html: string): string {
|
||||
return html
|
||||
.replace(/<style[^>]*>[\s\S]*?<\/style>/gi, '')
|
||||
.replace(/<script[^>]*>[\s\S]*?<\/script>/gi, '')
|
||||
.replace(/<[^>]+>/g, ' ')
|
||||
.replace(/\s+/g, ' ')
|
||||
.replace(/ /g, ' ')
|
||||
.replace(/&/g, '&')
|
||||
.replace(/</g, '<')
|
||||
.replace(/>/g, '>')
|
||||
.trim();
|
||||
}
|
||||
|
||||
/* ── Read PPTX (extract text from slides) ── */
|
||||
|
||||
/**
 * Pull the visible text out of every slide in a PPTX archive.
 *
 * PPTX is a ZIP; slides live at ppt/slides/slideN.xml. Each slide's text
 * runs are <a:t> elements, which are collected with a regex (no XML
 * parser). Slides with no text are omitted from the result.
 *
 * @param buffer Raw PPTX (ZIP) bytes.
 * @returns One joined text string per non-empty slide, in slide order.
 */
async function extractPptxText(buffer: ArrayBuffer): Promise<string[]> {
  const JSZip = (await import('jszip')).default;
  const zip = await JSZip.loadAsync(buffer);

  const slideTexts: string[] = [];
  // Numeric sort: 'slide10' must come after 'slide2', not lexicographically.
  const slideFiles = Object.keys(zip.files)
    .filter(f => /^ppt\/slides\/slide\d+\.xml$/i.test(f))
    .sort((a, b) => {
      const numA = parseInt(a.match(/slide(\d+)/)?.[1] || '0');
      const numB = parseInt(b.match(/slide(\d+)/)?.[1] || '0');
      return numA - numB;
    });

  for (const slideFile of slideFiles) {
    const content = await zip.file(slideFile)?.async('string');
    if (!content) continue;

    // Extract text from <a:t> tags (PowerPoint text elements)
    const texts: string[] = [];
    const textRegex = /<a:t>([^<]*)<\/a:t>/g;
    let match;
    while ((match = textRegex.exec(content)) !== null) {
      if (match[1].trim()) texts.push(match[1]);
    }
    if (texts.length > 0) {
      slideTexts.push(texts.join(' '));
    }
  }

  return slideTexts;
}
|
||||
|
||||
/* ── Write PPTX ── */
|
||||
|
||||
interface SlideContent {
|
||||
title: string;
|
||||
body: string;
|
||||
}
|
||||
|
||||
function splitTextIntoSlides(text: string, title: string): SlideContent[] {
|
||||
const slides: SlideContent[] = [];
|
||||
|
||||
// Split on double newlines or specific markers
|
||||
const sections = text.split(/\n{2,}/);
|
||||
let currentSlide: SlideContent = { title, body: '' };
|
||||
let bodyLines: string[] = [];
|
||||
|
||||
for (const section of sections) {
|
||||
const trimmed = section.trim();
|
||||
if (!trimmed) continue;
|
||||
|
||||
// If accumulated body is getting long, start a new slide
|
||||
if (bodyLines.join('\n').length > 500) {
|
||||
currentSlide.body = bodyLines.join('\n');
|
||||
slides.push(currentSlide);
|
||||
bodyLines = [];
|
||||
currentSlide = { title: '', body: '' };
|
||||
}
|
||||
|
||||
bodyLines.push(trimmed);
|
||||
}
|
||||
|
||||
if (bodyLines.length > 0) {
|
||||
currentSlide.body = bodyLines.join('\n');
|
||||
slides.push(currentSlide);
|
||||
}
|
||||
|
||||
// If first slide has no title, use the provided title
|
||||
if (slides.length > 0 && !slides[0].title) {
|
||||
slides[0].title = title;
|
||||
}
|
||||
|
||||
return slides.length > 0 ? slides : [{ title, body: text }];
|
||||
}
|
||||
|
||||
function splitMarkdownIntoSlides(mdText: string): SlideContent[] {
|
||||
const slides: SlideContent[] = [];
|
||||
// Split on headings (# or ##)
|
||||
const sections = mdText.split(/^(?=#{1,2}\s)/m);
|
||||
|
||||
for (const section of sections) {
|
||||
const trimmed = section.trim();
|
||||
if (!trimmed) continue;
|
||||
|
||||
const headingMatch = trimmed.match(/^#{1,2}\s+(.*)/m);
|
||||
const title = headingMatch ? headingMatch[1].trim() : '';
|
||||
const body = headingMatch
|
||||
? trimmed.substring(headingMatch[0].length).trim()
|
||||
: trimmed;
|
||||
|
||||
// Clean markdown syntax for plain text in slides
|
||||
const cleanBody = body
|
||||
.replace(/^#{1,6}\s+/gm, '')
|
||||
.replace(/\*\*(.*?)\*\*/g, '$1')
|
||||
.replace(/\*(.*?)\*/g, '$1')
|
||||
.replace(/`([^`]+)`/g, '$1')
|
||||
.replace(/\[([^\]]+)\]\([^)]+\)/g, '$1')
|
||||
.replace(/^[-*]\s+/gm, ' - ')
|
||||
.replace(/^\d+\.\s+/gm, (m) => ' ' + m);
|
||||
|
||||
slides.push({ title: title || 'Slide', body: cleanBody });
|
||||
}
|
||||
|
||||
return slides.length > 0 ? slides : [{ title: 'Untitled', body: mdText }];
|
||||
}
|
||||
|
||||
async function createPptx(slides: SlideContent[]): Promise<Blob> {
|
||||
const PptxGenJS = (await import('pptxgenjs')).default;
|
||||
const pptx = new PptxGenJS();
|
||||
|
||||
pptx.layout = 'LAYOUT_WIDE';
|
||||
pptx.author = 'Transmute';
|
||||
|
||||
for (const slide of slides) {
|
||||
const s = pptx.addSlide();
|
||||
|
||||
if (slide.title) {
|
||||
s.addText(slide.title, {
|
||||
x: 0.5,
|
||||
y: 0.3,
|
||||
w: '90%',
|
||||
h: 1,
|
||||
fontSize: 28,
|
||||
bold: true,
|
||||
color: '2d1f14',
|
||||
fontFace: 'Arial',
|
||||
});
|
||||
}
|
||||
|
||||
if (slide.body) {
|
||||
// Truncate very long bodies
|
||||
const bodyText = slide.body.length > 2000 ? slide.body.slice(0, 2000) + '...' : slide.body;
|
||||
s.addText(bodyText, {
|
||||
x: 0.5,
|
||||
y: slide.title ? 1.5 : 0.5,
|
||||
w: '90%',
|
||||
h: slide.title ? 5.5 : 6.5,
|
||||
fontSize: 14,
|
||||
color: '4a3728',
|
||||
fontFace: 'Arial',
|
||||
valign: 'top',
|
||||
paraSpaceAfter: 8,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const output = await pptx.write({ outputType: 'arraybuffer' }) as ArrayBuffer;
|
||||
return new Blob([output], { type: getMimeType('pptx') });
|
||||
}
|
||||
|
||||
/* ── Main converter ── */
|
||||
|
||||
/**
 * Convert a presentation to/from PPTX.
 *
 * Directions supported (anything else throws):
 *  - pptx → txt | html | pdf | md  (text-only extraction; layout/images are dropped)
 *  - txt | md | html | htm → pptx
 *
 * @param file         Input file; its extension selects the conversion direction.
 * @param targetFormat Desired output extension.
 * @param onProgress   Optional progress callback, called with 10/30/50/60/100.
 * @returns            The converted blob plus a suggested output filename.
 * @throws Error for unsupported source/target combinations.
 */
export async function convertPresentation(
  file: File,
  targetFormat: string,
  onProgress?: (progress: number) => void
): Promise<ConversionResult> {
  onProgress?.(10);

  const sourceExt = getExtension(file.name);
  let resultBlob: Blob;
  // Base filename (extension stripped) doubles as the document/deck title.
  const title = file.name.replace(/\.[^.]+$/, '');

  if (sourceExt === 'pptx') {
    // PPTX → other format
    const buffer = await readFileAsArrayBuffer(file);
    onProgress?.(30);

    // One plain-text string per slide, in slide order.
    const slideTexts = await extractPptxText(buffer);
    onProgress?.(60);

    switch (targetFormat) {
      case 'txt': {
        // Slides joined with a visible separator per slide.
        const fullText = slideTexts
          .map((text, i) => `--- Slide ${i + 1} ---\n${text}`)
          .join('\n\n');
        resultBlob = new Blob([fullText], { type: getMimeType('txt') });
        break;
      }
      case 'html': {
        // One <section> per slide; escapeHtml guards against markup in slide text.
        const slidesHtml = slideTexts
          .map((text, i) => `<section><h2>Slide ${i + 1}</h2><p>${escapeHtml(text)}</p></section>`)
          .join('\n');
        const fullHtml = `<!DOCTYPE html><html><head><meta charset="utf-8"><title>${escapeHtml(title)}</title>
<style>body{font-family:sans-serif;max-width:800px;margin:40px auto;padding:0 20px}section{margin:2em 0;padding:1em;border:1px solid #e5e5e5;border-radius:8px}h2{color:#333}</style>
</head><body><h1>${escapeHtml(title)}</h1>${slidesHtml}</body></html>`;
        resultBlob = new Blob([fullHtml], { type: getMimeType('html') });
        break;
      }
      case 'pdf': {
        const { jsPDF } = await import('jspdf');
        const doc = new jsPDF({ unit: 'mm', format: 'a4' });
        const fullText = slideTexts.join('\n\n');
        // Wrap to ~170mm text width (A4 width minus margins).
        const lines = doc.splitTextToSize(fullText, 170);
        let y = 20;
        doc.setFontSize(18);
        doc.text(title, 20, y);
        y += 12;
        doc.setFontSize(11);
        for (const line of lines) {
          // Start a new page before running off the bottom (y is in mm).
          if (y > 280) { doc.addPage(); y = 20; }
          doc.text(line, 20, y);
          y += 6;
        }
        resultBlob = doc.output('blob');
        break;
      }
      case 'md': {
        // Each slide becomes an H2 section separated by horizontal rules.
        const md = slideTexts
          .map((text, i) => `## Slide ${i + 1}\n\n${text}`)
          .join('\n\n---\n\n');
        resultBlob = new Blob([`# ${title}\n\n${md}`], { type: getMimeType('md') });
        break;
      }
      default:
        throw new Error(`Unsupported: pptx → ${targetFormat}`);
    }
  } else {
    // Other format → PPTX
    let slides: SlideContent[];

    switch (sourceExt) {
      case 'txt': {
        const text = await readFileAsText(file);
        slides = splitTextIntoSlides(text, title);
        break;
      }
      case 'md': {
        // Markdown headings drive slide boundaries and titles.
        const text = await readFileAsText(file);
        slides = splitMarkdownIntoSlides(text);
        break;
      }
      case 'html':
      case 'htm': {
        // HTML is reduced to plain text first, then chunked like txt.
        const html = await readFileAsText(file);
        const text = stripHtmlTags(html);
        slides = splitTextIntoSlides(text, title);
        break;
      }
      default:
        throw new Error(`Unsupported: ${sourceExt} → pptx`);
    }

    onProgress?.(50);
    resultBlob = await createPptx(slides);
  }

  onProgress?.(100);

  return {
    blob: resultBlob,
    filename: buildOutputFilename(file.name, targetFormat),
  };
}
|
||||
@@ -0,0 +1,179 @@
|
||||
import { ConversionResult } from '@/types';
|
||||
import { buildOutputFilename, getMimeType } from '@/lib/utils';
|
||||
import { getExtension } from '@/lib/fileDetector';
|
||||
|
||||
/**
|
||||
* Spreadsheet converter using SheetJS (xlsx).
|
||||
* Handles xlsx, xls, ods → csv, json, tsv, xml, yaml, toml, ods, xlsx, html, txt
|
||||
* Also handles csv/json/tsv → xlsx and similar data-to-spreadsheet routes.
|
||||
*/
|
||||
|
||||
const SPREADSHEET_EXTENSIONS = new Set(['xlsx', 'xls', 'ods']);
|
||||
|
||||
/** Check if this conversion involves a spreadsheet format */
|
||||
export function isSpreadsheetConversion(sourceExt: string, targetFormat: string): boolean {
|
||||
return SPREADSHEET_EXTENSIONS.has(sourceExt) || SPREADSHEET_EXTENSIONS.has(targetFormat);
|
||||
}
|
||||
|
||||
async function readFileAsArrayBuffer(file: File): Promise<ArrayBuffer> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const reader = new FileReader();
|
||||
reader.onload = (e) => resolve(e.target?.result as ArrayBuffer);
|
||||
reader.onerror = () => reject(new Error('Failed to read file'));
|
||||
reader.readAsArrayBuffer(file);
|
||||
});
|
||||
}
|
||||
|
||||
async function readFileAsText(file: File): Promise<string> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const reader = new FileReader();
|
||||
reader.onload = (e) => resolve(e.target?.result as string);
|
||||
reader.onerror = () => reject(new Error('Failed to read file'));
|
||||
reader.readAsText(file);
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Convert a spreadsheet (or tabular data file) to another format with SheetJS.
 *
 * Parsing: xlsx/xls/ods are read as binary workbooks; csv/tsv/json/xml/yaml/toml
 * are parsed into a single-sheet workbook. Writing: workbook formats keep all
 * sheets; text-based targets (csv/tsv/json/html/txt/yaml/toml) export only the
 * FIRST sheet.
 *
 * @param file         Source file; its extension decides how it is parsed.
 * @param targetFormat Output extension.
 * @param onProgress   Optional progress callback, called with 10/20/60/100.
 * @returns            The converted blob plus a suggested output filename.
 * @throws Error for unsupported source or target formats.
 */
export async function convertSpreadsheet(
  file: File,
  targetFormat: string,
  onProgress?: (progress: number) => void
): Promise<ConversionResult> {
  onProgress?.(10);

  // Lazy-load SheetJS so it is only fetched when actually needed
  const XLSX = await import('xlsx');
  onProgress?.(20);

  const sourceExt = getExtension(file.name);
  let workbook: ReturnType<typeof XLSX.read>;

  // Parse the source file into a workbook
  if (SPREADSHEET_EXTENSIONS.has(sourceExt)) {
    // Binary spreadsheet format — read as ArrayBuffer
    const buffer = await readFileAsArrayBuffer(file);
    workbook = XLSX.read(buffer, { type: 'array' });
  } else if (sourceExt === 'csv') {
    workbook = XLSX.read(await readFileAsText(file), { type: 'string' });
  } else if (sourceExt === 'tsv') {
    const text = await readFileAsText(file);
    // NOTE(review): SheetJS documents FS primarily as a CSV *write* option —
    // confirm XLSX.read honors it on parse, or that tab auto-detection suffices.
    workbook = XLSX.read(text, { type: 'string', FS: '\t' });
  } else if (sourceExt === 'json') {
    const text = await readFileAsText(file);
    const data = JSON.parse(text);
    // A lone object becomes a one-row sheet; arrays map row-per-element.
    const arr = Array.isArray(data) ? data : [data];
    workbook = XLSX.utils.book_new();
    const ws = XLSX.utils.json_to_sheet(arr);
    XLSX.utils.book_append_sheet(workbook, ws, 'Sheet1');
  } else if (sourceExt === 'xml') {
    const text = await readFileAsText(file);
    // Try parsing as SpreadsheetML, fallback to generic
    try {
      workbook = XLSX.read(text, { type: 'string' });
    } catch {
      // Wrap raw XML as single-cell sheet
      workbook = XLSX.utils.book_new();
      const ws = XLSX.utils.aoa_to_sheet([['XML Content'], [text]]);
      XLSX.utils.book_append_sheet(workbook, ws, 'Sheet1');
    }
  } else if (sourceExt === 'yaml' || sourceExt === 'yml') {
    const yaml = (await import('js-yaml')).default;
    const text = await readFileAsText(file);
    const data = yaml.load(text);
    const arr = Array.isArray(data) ? data : [data];
    workbook = XLSX.utils.book_new();
    const ws = XLSX.utils.json_to_sheet(arr as object[]);
    XLSX.utils.book_append_sheet(workbook, ws, 'Sheet1');
  } else if (sourceExt === 'toml') {
    const TOML = await import('smol-toml');
    const text = await readFileAsText(file);
    const data = TOML.parse(text);
    // TOML documents presumably parse to a table (object); the array branch is defensive.
    const arr = Array.isArray(data) ? data : [data];
    workbook = XLSX.utils.book_new();
    const ws = XLSX.utils.json_to_sheet(arr as object[]);
    XLSX.utils.book_append_sheet(workbook, ws, 'Sheet1');
  } else {
    throw new Error(`Unsupported source format for spreadsheet conversion: ${sourceExt}`);
  }

  onProgress?.(60);

  // Convert workbook to target format
  let blob: Blob;
  // Text-based targets below operate on the first sheet only.
  const firstSheet = workbook.Sheets[workbook.SheetNames[0]];

  switch (targetFormat) {
    case 'xlsx': {
      const out = XLSX.write(workbook, { bookType: 'xlsx', type: 'array' });
      blob = new Blob([out], { type: getMimeType('xlsx') });
      break;
    }
    case 'xls': {
      const out = XLSX.write(workbook, { bookType: 'xls', type: 'array' });
      blob = new Blob([out], { type: getMimeType('xls') });
      break;
    }
    case 'ods': {
      const out = XLSX.write(workbook, { bookType: 'ods', type: 'array' });
      blob = new Blob([out], { type: getMimeType('ods') });
      break;
    }
    case 'csv': {
      const csvText = XLSX.utils.sheet_to_csv(firstSheet);
      blob = new Blob([csvText], { type: getMimeType('csv') });
      break;
    }
    case 'tsv': {
      // Same CSV writer with a tab field separator.
      const tsvText = XLSX.utils.sheet_to_csv(firstSheet, { FS: '\t' });
      blob = new Blob([tsvText], { type: getMimeType('tsv') });
      break;
    }
    case 'json': {
      // Rows become objects keyed by the header row.
      const jsonData = XLSX.utils.sheet_to_json(firstSheet);
      const jsonText = JSON.stringify(jsonData, null, 2);
      blob = new Blob([jsonText], { type: getMimeType('json') });
      break;
    }
    case 'xml': {
      // Use SheetJS to produce a SpreadsheetML XML
      const out = XLSX.write(workbook, { bookType: 'xlml', type: 'string' });
      blob = new Blob([out], { type: getMimeType('xml') });
      break;
    }
    case 'html': {
      const htmlText = XLSX.utils.sheet_to_html(firstSheet);
      blob = new Blob([htmlText], { type: getMimeType('html') });
      break;
    }
    case 'txt': {
      const txtText = XLSX.utils.sheet_to_txt(firstSheet);
      blob = new Blob([txtText], { type: getMimeType('txt') });
      break;
    }
    case 'yaml':
    case 'yml': {
      const yaml = (await import('js-yaml')).default;
      const data = XLSX.utils.sheet_to_json(firstSheet);
      const yamlText = yaml.dump(data);
      blob = new Blob([yamlText], { type: getMimeType('yaml') });
      break;
    }
    case 'toml': {
      const TOML = await import('smol-toml');
      const data = XLSX.utils.sheet_to_json(firstSheet);
      // TOML needs a root object, wrap array in { rows: [...] }
      const tomlText = TOML.stringify({ rows: data } as Record<string, unknown>);
      blob = new Blob([tomlText], { type: getMimeType('toml') });
      break;
    }
    default:
      throw new Error(`Unsupported target format for spreadsheet conversion: ${targetFormat}`);
  }

  onProgress?.(100);

  return {
    blob,
    filename: buildOutputFilename(file.name, targetFormat),
  };
}
|
||||
+12
-1
@@ -6,8 +6,9 @@ const EXTENSION_MAP: Record<string, FileCategory> = {
|
||||
bmp: 'image', tiff: 'image', tif: 'image', avif: 'image', svg: 'image',
|
||||
ico: 'image',
|
||||
// Documents
|
||||
pdf: 'document', docx: 'document', doc: 'document', txt: 'document',
|
||||
pdf: 'document', docx: 'document', txt: 'document',
|
||||
md: 'document', html: 'document', htm: 'document', rtf: 'document',
|
||||
epub: 'document', pptx: 'document',
|
||||
// Audio
|
||||
mp3: 'audio', wav: 'audio', flac: 'audio', ogg: 'audio', aac: 'audio',
|
||||
m4a: 'audio', wma: 'audio', opus: 'audio',
|
||||
@@ -17,6 +18,16 @@ const EXTENSION_MAP: Record<string, FileCategory> = {
|
||||
// Data
|
||||
csv: 'data', json: 'data', xml: 'data', yaml: 'data', yml: 'data',
|
||||
tsv: 'data', toml: 'data',
|
||||
ini: 'data', env: 'data', properties: 'data',
|
||||
ndjson: 'data', jsonl: 'data', sql: 'data',
|
||||
// Spreadsheets (category: data)
|
||||
xlsx: 'data', xls: 'data', ods: 'data',
|
||||
// HEIC images
|
||||
heic: 'image', heif: 'image',
|
||||
// PSD (Photoshop)
|
||||
psd: 'image',
|
||||
// Fonts (categorized as data for routing)
|
||||
ttf: 'data', otf: 'data', woff: 'data', woff2: 'data',
|
||||
};
|
||||
|
||||
export function getExtension(filename: string): string {
|
||||
|
||||
@@ -47,6 +47,7 @@ export function getMimeType(format: string): string {
|
||||
pdf: 'application/pdf',
|
||||
docx: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
|
||||
rtf: 'application/rtf',
|
||||
epub: 'application/epub+zip',
|
||||
html: 'text/html',
|
||||
htm: 'text/html',
|
||||
txt: 'text/plain',
|
||||
@@ -58,6 +59,28 @@ export function getMimeType(format: string): string {
|
||||
yml: 'application/x-yaml',
|
||||
tsv: 'text/tab-separated-values',
|
||||
toml: 'application/toml',
|
||||
ini: 'text/plain',
|
||||
env: 'text/plain',
|
||||
properties: 'text/plain',
|
||||
ndjson: 'application/x-ndjson',
|
||||
jsonl: 'application/x-ndjson',
|
||||
sql: 'application/sql',
|
||||
// Spreadsheets
|
||||
xlsx: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
|
||||
xls: 'application/vnd.ms-excel',
|
||||
ods: 'application/vnd.oasis.opendocument.spreadsheet',
|
||||
// HEIC
|
||||
heic: 'image/heic',
|
||||
heif: 'image/heif',
|
||||
// PSD
|
||||
psd: 'image/vnd.adobe.photoshop',
|
||||
// Presentations
|
||||
pptx: 'application/vnd.openxmlformats-officedocument.presentationml.presentation',
|
||||
// Fonts
|
||||
ttf: 'font/ttf',
|
||||
otf: 'font/otf',
|
||||
woff: 'font/woff',
|
||||
woff2: 'font/woff2',
|
||||
};
|
||||
return mimeMap[format] || 'application/octet-stream';
|
||||
}
|
||||
|
||||
Vendored
+11
@@ -0,0 +1,11 @@
|
||||
/**
 * Minimal ambient typings for the untyped 'heic2any' package
 * (HEIC/HEIF → other image formats, in the browser).
 */
declare module 'heic2any' {
  interface HeicOptions {
    // Source HEIC/HEIF image data.
    blob: Blob;
    // Target MIME type — presumably e.g. 'image/png'; confirm against package docs.
    toType?: string;
    // Output quality — presumably 0..1 as with canvas.toBlob; confirm against package docs.
    quality?: number;
    // Frame interval when producing animated GIF output — see package docs.
    gifInterval?: number;
  }

  // Resolves to one Blob, or an array of Blobs — presumably for multi-image HEIC inputs.
  function heic2any(options: HeicOptions): Promise<Blob | Blob[]>;
  export default heic2any;
}
|
||||
Reference in New Issue
Block a user